Example #1
def start_stream():
    global darkice
    global darkice_stderr_queue
    global darkice_stdout_queue
    values = request.json

    alsacap = subprocess.Popen('alsacap -R', shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, error = alsacap.communicate()

    found_usb = False
    channels = None
    sampling_rate = None
    for line in output.splitlines():
        print line
        if found_usb and 'channel' in line and 'sampling rate' in line:
            print "line:"
            print line.strip()
            channels, sampling_rate = parse_card_info_string(line.strip())
            break
        if 'USB' in line:    # start capturing data
            print "start capturing data"
            found_usb = True

    print(channels)
    print(sampling_rate)

    if channels and sampling_rate:
        darkice_config['audio'].channel.value = max(channels)  # todo fix this max/min
        darkice_config['audio'].sampleRate.value = max(sampling_rate)  # todo fix this max/min

    darkice_config['servers'][0].name.value = unicode(values['name'])
    darkice_config['servers'][0].genre.value = unicode(values['genre'])
    darkice_config['servers'][0].url.value = unicode(values['url'])
    darkice_config['servers'][0].description.value = unicode(values['description'])

    print app_config['recording_folder']
    print unicode(values['name'])

    darkice_config['servers'][0].localDumpFile.value = path.join(app_config['recording_folder'], unicode(values['name'] + '.mp3'))

    try:
        with codecs.open('test.cfg', mode='wb', encoding='utf-8') as config_file:
            config.write_to_file(config_file, darkice_config['general'], darkice_config['audio'], darkice_config['servers'])
            filename = config_file.name
            print filename
    except IOError as e:
        print("there is an error")
        return {'error': 'File not availabe: {}'.format(e)}

    darkice = subprocess.Popen('sudo darkice -c {}'.format(filename), stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True)

    # non blocking reading of stdout and stderr for darkice status
    thread_output = Thread(target=read_nonblocking_output, args=(darkice.stdout, darkice_stdout_queue))
    thread_output.daemon = True
    thread_output.start()
    thread_error = Thread(target=read_nonblocking_error, args=(darkice.stderr, darkice_stderr_queue))
    thread_error.daemon = True
    thread_error.start()

    return get_stream_status()
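The helpers parse_card_info_string, read_nonblocking_output and read_nonblocking_error are defined elsewhere in this project and are not shown in the example. As a rough illustration only, the reader thread targets could be as simple as draining each pipe into its queue from the daemon threads started above (this sketch is an assumption about their behaviour, not the project's actual code):

# Hypothetical sketch of the reader-thread targets used by start_stream();
# the real helpers live elsewhere in the project and may differ.
def read_nonblocking_output(pipe, queue):
    # blocking readline is fine here because it runs in a daemon thread,
    # so the request handler itself never blocks on darkice output
    for line in iter(pipe.readline, b''):
        queue.put(line)
    pipe.close()


def read_nonblocking_error(pipe, queue):
    read_nonblocking_output(pipe, queue)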
Example #2

 def _init_config_filepath(self):
     tmp_dir = os.path.join(gettempdir(), 'keras_image_captioning')
     mkdir_p(tmp_dir)
     config_file = NamedTemporaryFile(suffix='.yaml', dir=tmp_dir,
                                      delete=False)
     config_file.close()
     self._config_filepath = config_file.name
     write_to_file(self._config, self._config_filepath)
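mkdir_p and write_to_file come from the keras_image_captioning code base and are not shown in this example. A minimal sketch of what such helpers commonly look like (names reused for illustration; the bodies below are assumptions, not the project's code):

import errno
import os
import yaml  # assuming the .yaml suffix means the config is serialized with PyYAML


def mkdir_p(path):
    # behave like `mkdir -p`: create the tree, ignore "already exists"
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise


def write_to_file(config, filepath):
    # dump the config object's attributes as YAML
    with open(filepath, 'w') as f:
        yaml.safe_dump(vars(config), f, default_flow_style=False)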
Example #3

def main_worker(file_path, uploaded_file_id):

    time.sleep(0.5)
    try:
        # compute the hash up front: it is needed both for the id lookup below
        # and for compare_hash.compare() further down
        computed_file_hash = compute_hash(file_path)
        if uploaded_file_id == 0:
            db = mysql.connector.Connect(**login_info)
            cursor = db.cursor()
            sql_query = "SELECT * FROM upload_file WHERE hash LIKE '" + computed_file_hash + "' AND processed LIKE '0' ORDER BY id DESC"
            cursor.execute(sql_query)
            for row in cursor:
                uploaded_file_id = row[0]
                break

        print("[" + str(uploaded_file_id) + "] New File " + file_path +
              ". Details added to 'upload_file' table")
        config.write_to_file(
            config.log_file_name, "[" + str(uploaded_file_id) + "] New File " +
            file_path + ". Details added to 'upload_file' table")
        start_time = time.time()
        if compare_hash.compare(computed_file_hash, file_path, start_time,
                                uploaded_file_id):
            print("[" + str(uploaded_file_id) +
                  "] No Matches Found based on SHA-256 hash. Time: " + str(
                      round(time.time() -
                            start_time, config.time_digit_precision)))
            config.write_to_file(
                config.log_file_name, "[" + str(uploaded_file_id) +
                "] No Matches Found based on SHA-256 hash. Time: " + str(
                    round(time.time() - start_time,
                          config.time_digit_precision)))
            file_type = magic.from_file(file_path, mime=True)
            if ("png" in str(file_type)) or ("jpeg" in str(file_type)) or (
                    "bmp" in str(file_type)):
                # Send it to PNG Function
                print("[" + str(uploaded_file_id) + "] Image Detected")
                config.write_to_file(
                    config.log_file_name,
                    "[" + str(uploaded_file_id) + "] Image Detected")
                image_after_processing.handle_image(file_path,
                                                    uploaded_file_id,
                                                    computed_file_hash)
            elif "pdf" in file_type:
                # Send it to PDF function
                print("[" + str(uploaded_file_id) + "] PDF Detected")
                config.write_to_file(
                    config.log_file_name,
                    "[" + str(uploaded_file_id) + "] PDF Detected")
                pdf_after_processing.handle_pdf(computed_file_hash, file_path,
                                                uploaded_file_id)
                q1.put(file_path + ";;;" + str(start_time) + ";;;" +
                       str(uploaded_file_id))

            if config.keep_uploaded_files == 0:
                os.remove(file_path)
    except Exception as e:
        print("Error: " + e)
        config.write_to_file(config.error_file_name, e)
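compute_hash is not shown in this example. Since the log messages refer to a SHA-256 hash and the value is stored as text in the upload_file table, a typical implementation (an assumption, not the project's code) streams the file through hashlib:

import hashlib


def compute_hash(file_path, chunk_size=65536):
    # read in chunks so large uploads are not loaded into memory at once
    sha256 = hashlib.sha256()
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            sha256.update(chunk)
    return sha256.hexdigest()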
Example #4
def compare(current_file_hash, file_path, execution_start_time, uploaded_file_id):
	"""
		This script will check the hash of the newly entered file with the existing hashes from the upload_file table.
		If a similar file is found, the process will be stopped for this file. And details will be added to the log of this file in the upload_file table.
		Input: Newly Uploaded file.
		Output: Rejects the file if hash is not unique.
	"""
	import mysql.connector
	from database import login_info
	import os
	import json
	import config
	import time

	print("[" + str(uploaded_file_id) + "] Checking For Existing Copies within 'upload_file' table")
	config.write_to_file(config.log_file_name,"[" + str(uploaded_file_id) + "] Checking For Existing Copies within 'upload_file' table")
	log_dictionary = {}
	log_dictionary['all_steps'] = "Checking For Existing Copies within 'upload_file' table"

	db = mysql.connector.Connect(**login_info)
	cursor = db.cursor(buffered=True)

	cursor.execute('SELECT * FROM upload_file WHERE processed LIKE "1"')

	for row in cursor:
		db_file_hash = row[4]
		if db_file_hash == current_file_hash:
			os.remove(file_path)
			db2 = mysql.connector.Connect(**login_info)
			cursor_delete = db2.cursor()
			log_dictionary['all_steps'] += 'Rejected. Hash Matches with File Number: ' + str(row[0]) + '\n'
			total_time = str(round(time.time() - execution_start_time,config.time_digit_precision))
			print("[" + str(uploaded_file_id) + "] Rejected. Hash Matches with File Number: " + str(row[0]) + ". Time: " + total_time)
			config.write_to_file(config.log_file_name,"[" + str(uploaded_file_id) + "] Rejected. Hash Matches with File Number: " + str(row[0]) + ". Time: " + total_time)
			log_dictionary['total_time'] = "Time: " + total_time + "\n"
			json_string = json.dumps(log_dictionary)
			cursor_delete.execute('UPDATE upload_file SET log = "' + (str(json_string).replace('"','\\"')).replace("'","\\'") + '", processed = "2" WHERE hash LIKE "' + current_file_hash + '" AND processed LIKE "0"')
			db2.commit()

			cursor_delete.close()
			cursor.close()
			return False

	cursor.close()
	return True
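The UPDATE above escapes quotes by hand before splicing the JSON log into the SQL string. A safer equivalent, shown here only as a suggestion, lets mysql.connector do the escaping through placeholders:

# same statement as above, but with the values passed as parameters
cursor_delete.execute(
    "UPDATE upload_file SET log = %s, processed = '2' "
    "WHERE hash = %s AND processed = '0'",
    (json_string, current_file_hash))
db2.commit()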
Example #5
 def _write_active_config(self):
     CONFIG_FILENAME = 'hyperparams-config.yaml'
     self._config_filepath = self._path_from_result_dir(CONFIG_FILENAME)
     config.write_to_file(config.active_config(), self._config_filepath)
Example #6

def compare(filename, uploaded_file_id, page_number, source_width,
            source_height):
    """
		This function compares the currently generated thumbnail with all the other thumbnails from the 'thumbnail' table.
		It compares thumbnails that are generated by the same type of file (PDF or PNG), and with the same dimensions and orientation.
		It also generates the difference PNG and stores it locally and also on the 'thumb_comparison' table.
		Input: Thumnail. 
		Output: Percentage difference between the current thumbnail and the other "compatible" thumbnails in the 'thumbnail' table
	"""

    import mysql.connector
    import config
    from database import login_info
    import os
    import subprocess

    db = mysql.connector.Connect(**login_info)
    cursor = db.cursor()

    def compare_files(mainfile, otherfile):
        pipe = subprocess.Popen("node ImageComparison/compare.js %s %s" %
                                (mainfile, otherfile),
                                shell=True,
                                stdout=subprocess.PIPE).stdout
        output = pipe.read()
        return output.replace("\n", "")

    valid_comparison = "True"

    sql_query1 = """SELECT * FROM thumbnail WHERE dir LIKE '%s'""" % (filename)
    cursor.execute(sql_query1)

    file_id = 0
    file_orientation = ""
    file_generated_by = ""
    for row in cursor:
        file_id = row[0]
        file_orientation = row[4]
        file_generated_by = row[5]
        break
    cursor.close()

    db = mysql.connector.Connect(**login_info)
    cursor = db.cursor()
    sql_query2 = """SELECT * FROM thumbnail WHERE generatedBy LIKE '%s' AND width = '%s' AND height = '%s' AND dir NOT LIKE '%s' AND orientation LIKE '%s' """ % (
        file_generated_by, source_width, source_height, filename,
        file_orientation)
    cursor.execute(sql_query2)
    files_to_compare = list()
    files_to_compare_id = list()
    for row in cursor:
        files_to_compare.append(row[1].encode("utf8"))
        files_to_compare_id.append(row[0])
    cursor.close()
    print("[" + str(uploaded_file_id) + "," + page_number +
          "] Found %d Thumbnails for valid comparison" %
          (len(files_to_compare)))
    config.write_to_file(
        config.log_file_name, "[" + str(uploaded_file_id) + "," + page_number +
        "] Found %d Thumbnails for valid comparison" % (len(files_to_compare)))

    for file_number in range(len(files_to_compare)):
        percentage_difference = compare_files(filename,
                                              files_to_compare[file_number])

        if percentage_difference == "0":
            print("[" + str(uploaded_file_id) + "," + page_number +
                  "] Thumbnail Matches With Thumbnail ID: " +
                  str(files_to_compare_id[file_number]) + ". Aborting")
            config.write_to_file(
                config.log_file_name, "[" + str(uploaded_file_id) + "," +
                page_number + "] Thumbnail Matches With Thumbnail ID: " +
                str(files_to_compare_id[file_number]) + ". Aborting")
            db = mysql.connector.Connect(**login_info)
            cursor = db.cursor()
            sql_query4 = "DELETE FROM thumb_comparison WHERE new_image_id LIKE %s" % (
                file_id)
            cursor.execute(sql_query4)
            db.commit()
            cursor.close()
            db = mysql.connector.Connect(**login_info)
            cursor = db.cursor()
            sql_query5 = "DELETE FROM thumbnail WHERE id = %d" % (file_id)
            cursor.execute(sql_query5)
            db.commit()
            cursor.close()
            print("[" + str(uploaded_file_id) + "," + page_number +
                  "] Deleted database entry for this Thumbnail ID: " +
                  str(file_id))
            config.write_to_file(
                config.log_file_name,
                "[" + str(uploaded_file_id) + "," + page_number +
                "] Deleted database entry for this Thumbnail ID: " +
                str(file_id))
            valid_comparison = "False"
            return valid_comparison, str(files_to_compare_id[file_number])
        current_image_id = files_to_compare_id[file_number]
        db = mysql.connector.Connect(**login_info)
        cursor = db.cursor()

        output_file_name = filename.replace(
            config.store_thumbnails,
            "") + "_" + files_to_compare[file_number].replace(
                config.store_thumbnails, "") + "diff.png"
        os.rename(output_file_name,
                  config.store_comparison_images + output_file_name)

        comparison_blob = "NULL"

        if config.add_comparison_blob_into_db == 1:
            with open(config.store_comparison_images + output_file_name,
                      "rb") as img:
                comparison_blob = img.read()

        # sql_query3 = """INSERT INTO thumb_comparison VALUES(%d, %d, '%s', '%s')""" %(file_id, current_image_id, comparison_blob, str(percentage_difference))
        cursor.execute("INSERT INTO thumb_comparison VALUES(%s, %s, %s, %s)", (
            file_id,
            current_image_id,
            comparison_blob,
            str(percentage_difference),
        ))
        db.commit()
        cursor.close()
    return valid_comparison, "NONE"
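compare_files above shells out to a Node script and treats whatever it prints as the percentage difference. If the thumbnail paths can ever contain spaces or shell metacharacters, a list-based call (a suggested alternative, not the project's code) avoids the quoting problem entirely:

def compare_files(mainfile, otherfile):
    # no shell involved, so unusual characters in the file paths are safe
    output = subprocess.check_output(
        ["node", "ImageComparison/compare.js", mainfile, otherfile])
    return output.strip()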
Example #7

def handle_pdf(file_hash, file_name, uploaded_file_id):
	"""
	Updates the database with the document info of the file along with the
	orientation, height and width of the file passed as argument.
	Input: uploaded file
	Output: Update details about the file in 'upload_file' 
	"""
	import sys
	import mysql.connector
	from PyPDF2 import PdfFileReader
	import os
	from database import login_info
	import json
	import config

	inputpdf = PdfFileReader(open(file_name, "rb"))

	document_info = inputpdf.getDocumentInfo()
	xmp_info = inputpdf.getXmpMetadata()
	
	db = mysql.connector.Connect(**login_info)
	cursor=db.cursor()

	# mediaBox dimensions are in PDF points; multiplying by 0.352 (~25.4/72) converts them to millimetres
	w = float(inputpdf.getPage(0).mediaBox.getWidth()) * 0.352
	h = float(inputpdf.getPage(0).mediaBox.getHeight()) * 0.352

	orientation = ""

	if w > h:
		orientation = "Landscape"
	else:
		orientation = "Portrait"

	pdf_info = {}
	fonts = set()
	embedded = set()
	for page in inputpdf.pages:
		obj = page.getObject()
		f, e = walk(obj['/Resources'], fonts, embedded)
		fonts = fonts.union(f)
		embedded = embedded.union(e)

	unembedded = fonts - embedded
	font_list = sorted(list(fonts))

	pdf_info['pages'] = str(inputpdf.numPages)
	pdf_info['orientation'] = orientation
	pdf_info['dimensions'] = {"width": str(w), "height": str(h)}
	pdf_info['fonts'] = font_list

	print(pdf_info)
	
	doc_info = {}
	if document_info is not None:
		for item in document_info:
			doc_info[item] = document_info[item]

	xmp_pdf_info = {}
	if xmp_info is not None:
		for item in xmp_info:
			xmp_pdf_info[item] = xmp_info[item]

	pdf_info['document_information'] = doc_info
	pdf_info['xmp_information'] = xmp_pdf_info

	sql_query = "UPDATE upload_file SET doc_info = '" + (str(json.dumps(pdf_info)).replace('"','\\"')).replace("'","\\'") + "', processed='1' WHERE processed='0' AND hash LIKE '" + file_hash + "';"

	cursor.execute(sql_query)
	db.commit()
	cursor.close()

	print("[" + str(uploaded_file_id) + "] Added PDF details to 'upload_file' table.")
	config.write_to_file(config.log_file_name,"[" + str(uploaded_file_id) + "] Added PDF details to 'upload_file' table.")
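The walk helper applied to each page's /Resources dictionary is not part of this example. It presumably follows the widely used PyPDF2 font-listing recipe; a sketch of that recipe (an assumption about what the project's walk actually does) looks like this:

def walk(obj, fnt, emb):
    # recursively collect font names, noting which ones carry an embedded font program
    if not hasattr(obj, 'keys'):
        return None, None
    fontkeys = set(['/FontFile', '/FontFile2', '/FontFile3'])
    if '/BaseFont' in obj:
        fnt.add(obj['/BaseFont'])
    if '/FontName' in obj and [k for k in fontkeys if k in obj]:
        emb.add(obj['/FontName'])  # the font file itself is embedded in the PDF
    for key in obj.keys():
        walk(obj[key], fnt, emb)
    return fnt, emb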
Example #8

def handle_image(file_name, uploaded_file_id, file_hash):
    """
		This function handles the uploaded files that are detected as images.
		Input: Uploaded Image file
		Output: 
	"""
    import magic
    from PIL import Image
    import os
    import json
    import mysql.connector
    from database import login_info
    import config
    import shutil
    import compareCompatibleFiles
    import convert_to_tiles
    import time
    execution_start_time = time.time()
    log_dictionary = {}
    log_dictionary['all_steps'] = "Image Detected\n"
    file_type = magic.from_file(file_name, mime=True)
    if "png" not in file_type:
        conversion_start_time = time.time()
        if not os.path.exists(config.store_converted_images):
            os.makedirs(config.store_converted_images)
        im = Image.open(file_name)
        new_filename = file_name.replace(config.store_main_uploads,
                                         config.store_converted_images)
        new_filename = new_filename.split(".")[0] + ".png"
        im.save(new_filename)
        file_name = new_filename
        conversion_end_time = time.time()
        total_time = str(
            round(conversion_end_time - conversion_start_time,
                  config.time_digit_precision))

        print("[" + str(uploaded_file_id) +
              "] Converting Image to PNG. Time: " + total_time)
        log_dictionary[
            'all_steps'] += "Converting Image to PNG. Time: %s\n" % (
                total_time)
        config.write_to_file(
            config.log_file_name, "[" + str(uploaded_file_id) +
            "] Converting Image to PNG. Time: " + total_time)

    db = mysql.connector.Connect(**login_info)
    cursor = db.cursor()

    img_info = {}

    image = Image.open(file_name)
    source_width, source_height = image.size

    img_info['dimensions'] = {
        "width": str(source_width),
        "height": str(source_height)
    }
    sql_query = 'UPDATE upload_file SET doc_info = "' + (str(
        json.dumps(img_info)
    ).replace('"', '\\"')).replace(
        "'", "\\'"
    ) + '", processed="1" WHERE processed="0" AND hash LIKE "' + file_hash + '";'
    cursor.execute(sql_query)
    db.commit()
    cursor.close()
    print("[" + str(uploaded_file_id) +
          "] Added Image details to 'upload_file' table")
    log_dictionary[
        'all_steps'] += "Added Image details to 'upload_file' table\n"
    config.write_to_file(
        config.log_file_name, "[" + str(uploaded_file_id) +
        "] Added Image details to 'upload_file' table")

    thumbnail_generation_start_time = time.time()
    newHeight = float(config.fixed_thumbnail_height)
    factor = newHeight / source_height
    newWidth = source_width * factor
    image.thumbnail((newWidth, newHeight), Image.ANTIALIAS)
    if not os.path.exists(config.store_thumbnails):
        os.makedirs(config.store_thumbnails)
    if config.store_converted_images in file_name:
        newfile_location = config.store_thumbnails + file_name.replace(
            config.store_converted_images, "") + ".png"
    else:
        newfile_location = config.store_thumbnails + file_name.replace(
            config.store_main_uploads, "") + ".png"
    image.save(newfile_location)

    thumbnail_blob = "NULL"
    generatedBy = "PNG"
    orientation = ""

    if config.add_thumb_blob_into_db == 1:
        with open(newfile_location, "rb") as img:
            thumbnail_blob = img.read()

    if source_width > source_height:
        orientation = "Landscape"
    else:
        orientation = "Portrait"

    db = mysql.connector.Connect(**login_info)
    cursor = db.cursor()
    cursor.execute(
        "INSERT INTO `thumbnail` VALUES (NULL, %s, %s, %s, %s, %s, %s)", (
            newfile_location,
            str(source_width),
            str(source_height),
            orientation,
            generatedBy,
            thumbnail_blob,
        ))
    db.commit()
    cursor.close()
    thumbnail_generation_end_time = time.time()
    total_time = str(
        round(thumbnail_generation_end_time - thumbnail_generation_start_time,
              config.time_digit_precision))
    print(
        "[" + str(uploaded_file_id) +
        "] Thumbnail Generation Complete. Added Details to 'thumbnail' table. Time: "
        + total_time)
    log_dictionary[
        'all_steps'] += "Thumbnail Generation Complete. Added Details to 'thumbnail' table. Time: " + total_time + "\n"
    config.write_to_file(
        config.log_file_name, "[" + str(uploaded_file_id) +
        "] Thumbnail Generation Complete. Added Details to 'thumbnail' table. Time: "
        + total_time)

    comparison_start_time = time.time()
    checkVariable, similarThumbnailID = compareCompatibleFiles.compare(
        newfile_location, uploaded_file_id, "NULL", source_width,
        source_height)
    comparison_end_time = time.time()
    total_time = str(
        round(comparison_end_time - comparison_start_time,
              config.time_digit_precision))
    print(
        "[" + str(uploaded_file_id) +
        '] Thumbnails Compared. Comparison Details added to \'thumb_comparison\' table. Time: '
        + total_time)
    log_dictionary[
        'all_steps'] += 'Thumbnails Compared. Comparison Details added to \'thumb_comparison\' table. Time: ' + total_time + "\n"
    config.write_to_file(
        config.log_file_name, "[" + str(uploaded_file_id) +
        '] Thumbnails Compared. Comparison Details added to \'thumb_comparison\' table. Time: '
        + total_time)

    if checkVariable == "True":
        high_res_start_time = time.time()
        png_blob = ""
        if config.store_converted_images in file_name:
            shutil.copy(
                file_name,
                file_name.replace(config.store_converted_images,
                                  config.store_high_res_images))
        else:
            shutil.copy(
                file_name,
                file_name.replace(config.store_main_uploads,
                                  config.store_high_res_images))
        if config.add_high_res_png_into_db == 1:
            with open(file_name, "rb") as img:
                png_blob = img.read()
            # print("[" + str(uploaded_file_id) + "," + str(page_number) + "] High Resolution PNG Generated. Details Added to 'image' table. Time: " + total_time)
        db = mysql.connector.Connect(**login_info)
        cursor = db.cursor()
        sql_query = "SELECT * FROM `thumbnail` WHERE dir LIKE '" + newfile_location + "'"
        cursor.execute(sql_query)
        thumbnail_id = 0
        for row in cursor:
            thumbnail_id = row[0]
            break
        cursor.close()

        db = mysql.connector.Connect(**login_info)
        cursor = db.cursor()
        cursor.execute(
            "INSERT INTO `image` VALUES (NULL, NULL, NULL, %s, %s, %s, %s, %s, %s, %s, %s);",
            (
                uploaded_file_id,
                str(thumbnail_id),
                "NULL",
                png_blob,
                "NULL",
                str(source_width),
                str(source_height),
                "",
            ))
        db.commit()
        high_res_end_time = time.time()
        total_time = str(
            round(high_res_end_time - high_res_start_time,
                  config.time_digit_precision))
        print(
            "[" + str(uploaded_file_id) +
            "] High Resolution PNG Generated. Details Added to 'image' table. Time: "
            + total_time)
        log_dictionary[
            'all_steps'] += "High Resolution PNG Generated. Details Added to 'image' table. Time: " + total_time + "\n"
        config.write_to_file(
            config.log_file_name, "[" + str(uploaded_file_id) +
            "] High Resolution PNG Generated. Details Added to 'image' table. Time: "
            + total_time)

        db = mysql.connector.Connect(**login_info)
        cursor = db.cursor()
        sql_query = """SELECT * FROM `image` WHERE upload_file_id LIKE '%s' AND page_number LIKE '%s'""" % (
            uploaded_file_id, "")
        cursor.execute(sql_query)
        current_image_id = 0
        for row in cursor:
            current_image_id = row[0]
            break
        cursor.close()
        tiles_start_time = time.time()
        log_dictionary = convert_to_tiles.generate_tiles(
            file_name, current_image_id, log_dictionary, "NULL",
            uploaded_file_id, tiles_start_time)
    else:
        # print("[" + str(uploaded_file_id) + '] Thumbnails Compared. Comparison Details added to \'thumb_comparison\' table.')
        log_dictionary[
            'all_steps'] += 'Thumbnail matches with Thumbnail ID: ' + similarThumbnailID + '\n'
        # Dont convert, abort process
    if config.keep_converted_images == 0 and config.store_converted_images in file_name:
        os.remove(file_name)
    log_dictionary['total_time'] = str(time.time() - execution_start_time)
    sql_query = "UPDATE upload_file SET log = '" + (str(
        json.dumps(log_dictionary)).replace('"', '\\"')).replace(
            "'",
            "\\'") + "' WHERE hash = '" + file_hash + "' AND processed = '1'"
    db = mysql.connector.Connect(**login_info)
    cursor = db.cursor()
    cursor.execute(sql_query)
    db.commit()
    cursor.close()
Example #9

def convert(filename, file_width, file_height, page_number, pdf_location,
            file_hash, execution_start_time, log_dictionary, uploaded_file_id,
            process_start_time, thumbnail_id):
    """
		This function converts a single Page PDF into a high resolution image and adds the details into the 'image' table
		Input: A Single Page PDF/Image
		Output: A High Resolution Image
	"""

    from wand.image import Image
    from wand.color import Color
    import os
    from database import login_info
    import mysql.connector
    import config
    from PIL import Image as pilImage
    import convert_to_tiles
    import time

    db = mysql.connector.Connect(**login_info)
    cursor = db.cursor()
    png_blob = "NULL"
    pdf_blob = "NULL"
    if not os.path.exists(config.store_high_res_images):
        os.makedirs(config.store_high_res_images)
    with Image(filename=filename, resolution=config.high_res_value) as img:
        with Image(width=img.width,
                   height=img.height,
                   background=Color("white")) as bg:
            bg.composite(img, 0, 0)
            bg.save(filename=config.store_high_res_images +
                    filename.replace(config.store_split_pdfs, "") + ".png")

    im = pilImage.open(config.store_high_res_images +
                       filename.replace(config.store_split_pdfs, "") + ".png")
    img_width, img_height = im.size

    page_size = str(file_width) + "," + str(file_height)

    if config.add_high_res_png_into_db == 1:
        with open(
                config.store_high_res_images +
                filename.replace(config.store_split_pdfs, "") + ".png",
                "rb") as img:
            png_blob = img.read()

    if config.add_split_pdf_into_database == 1:
        with open(pdf_location, "rb") as pdf:
            pdf_blob = pdf.read()

    # sql_query = """INSERT INTO `image` (`id`, `album_id`, `user_id`, `upload_file_id`, `thumbnail_id`,`page_size`, `png`, `pdf`, `width`, `height`, `page_number`) VALUES (NULL, NULL, NULL, '""" + upload_file_id + """', '""" + str(thumbnail_id) + """', '""" + page_size + """', '""" + png_blob + """' , '""" + pdf_blob + """', '""" + str(img_width) + """', '""" + str(img_height) + """', '""" + str(page_number) + """');"""
    cursor.execute(
        "INSERT INTO `image`(upload_file_id, thumbnail_id, page_size, png, pdf, width, height, page_number) VALUES (%s, %s, %s, %s, %s, %s, %s, %s);",
        (uploaded_file_id, str(thumbnail_id), page_size, png_blob, pdf_blob,
         img_width, img_height, ""))
    db.commit()
    cursor.close()
    db = mysql.connector.Connect(**login_info)
    cursor = db.cursor()
    sql_query = """SELECT * FROM `image` WHERE upload_file_id LIKE '%s' AND page_number LIKE '%s'""" % (
        uploaded_file_id, str(page_number))
    cursor.execute(sql_query)
    current_image_id = 0
    for row in cursor:
        current_image_id = row[0]
        break
    cursor.close()
    process_end_time = time.time()

    total_time = str(
        round(process_end_time - process_start_time,
              config.time_digit_precision))
    print(
        "[" + str(uploaded_file_id) + "," + str(page_number) +
        "] High Resolution PNG Generated. Details Added to 'image' table. Time: "
        + total_time)
    config.write_to_file(
        config.log_file_name,
        "[" + str(uploaded_file_id) + "," + str(page_number) +
        "] High Resolution PNG Generated. Details Added to 'image' table. Time: "
        + total_time)
    log_dictionary['all_steps'] += "[" + str(
        page_number
    ) + "] High Resolution PNG Generated Details Added to 'image' table. Time: " + total_time + "\n"
    log_dictionary = convert_to_tiles.generate_tiles(
        config.store_high_res_images +
        filename.replace(config.store_split_pdfs, "") + ".png",
        current_image_id, log_dictionary, page_number, uploaded_file_id,
        time.time())
    return log_dictionary
Example #10
def generate_tiles(filename, image_id, log_dictionary, page_number,
                   uploaded_file_id, process_start_time):
    """
		This function generates tiles for the high resolution image that is created. The tile details are stored in the 'tiles' table.
		Input: High resolution PNG
		Output: Tiles for the input PNG
	"""

    import tempfile, shutil
    import deepzoom
    import mysql.connector
    from database import login_info
    import config
    import os
    import time

    tmpdir = tempfile.mkdtemp()
    tmpimg = str(image_id) + "_image.png"
    tmpdzi = str(image_id) + "_tmp.dzi"
    image_name = tmpdir + "/" + tmpimg
    dzi_name = tmpdir + "/" + tmpdzi

    with open(image_name, "wb") as img_file:
        with open(filename, "rb") as read_file:
            img_file.write(read_file.read())

    creator = deepzoom.ImageCreator(tile_size=config.tile_pixel_size,
                                    tile_overlap=2,
                                    tile_format="png",
                                    image_quality=1,
                                    resize_filter="bicubic")
    creator.create(image_name, dzi_name)
    width, height = creator.image.size

    basepath = tmpdir + "/" + str(image_id) + "_tmp_files"

    db = mysql.connector.Connect(**login_info)
    cursor = db.cursor()

    for d in os.listdir(basepath):
        curpath = os.path.join(basepath, d)
        if os.path.isdir(curpath):
            for f in os.listdir(curpath):
                if os.path.isfile(os.path.join(curpath, f)):
                    with open(os.path.join(curpath, f), "rb") as tile_file:
                        tile_blob = tile_file.read()
                        cursor.execute(
                            "INSERT INTO tiles values (NULL,%s,%s,%s)", (
                                str(image_id),
                                d + '_' + f[:-4],
                                tile_blob,
                            ))
                        db.commit()
    shutil.rmtree(tmpdir)
    cursor.close()
    db.close()

    process_end_time = time.time()
    total_time = str(
        round(process_end_time - process_start_time,
              config.time_digit_precision))
    log_dictionary['all_steps'] += "[" + str(
        page_number
    ) + "] Tile Generation Complete. Details Added to 'tiles' table. Time " + total_time + "\n"
    print("[" + str(uploaded_file_id) + "," + str(page_number) +
          "] Generated Tiles and Added Details to 'tiles' table. Time: " +
          total_time)
    config.write_to_file(
        config.log_file_name,
        "[" + str(uploaded_file_id) + "," + str(page_number) +
        "] Generated Tiles and Added Details to 'tiles' table. Time: " +
        total_time)
    return log_dictionary
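deepzoom writes its output as <level>/<column>_<row>.png directories under the *_files folder, so the key stored in the tiles table above ends up as "level_column_row". A sketch of reading a tile back out (the column names here are assumptions, since the positional INSERT above does not name them):

def fetch_tile(image_id, level, column, row):
    # assumes a schema roughly like tiles(id, image_id, name, blob),
    # matching the positional INSERT used in generate_tiles above
    import mysql.connector
    from database import login_info
    db = mysql.connector.Connect(**login_info)
    cursor = db.cursor()
    cursor.execute(
        "SELECT * FROM tiles WHERE image_id = %s AND name = %s",
        (str(image_id), "%d_%d_%d" % (level, column, row)))
    result = cursor.fetchone()
    cursor.close()
    db.close()
    return result[-1] if result else None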
Example #11
def split_pdfs(file_name, execution_start_time, uploaded_file_id):
    """
    This scipt will convert single pdf file with multiple pages into individual PDF files. Then carry out the rest of the functionality: Convert to thumnbail, compare generated thumbnail, generate high resolution image and also generate tiles.
    Input: Uploaded PDF
    Output: Individual Page PDFs
    """

    from PyPDF2 import PdfFileWriter, PdfFileReader
    import time
    import convert_single_file
    import config
    import os
    from database import login_info
    import convert_to_high_res_png
    import mysql.connector
    import compareCompatibleFiles
    import json

    log_dictionary = {}

    file_hash = compute_hash(file_name)
    start = time.time()
    inputpdf = PdfFileReader(open(file_name, "rb"))

    log_dictionary['all_steps'] = "Splitting PDF File into " + str(
        inputpdf.numPages) + " Pages\n"

    print("[" + str(uploaded_file_id) + "] Splitting PDF File into " +
          str(inputpdf.numPages) + " Pages")
    config.write_to_file(
        config.log_file_name, "[" + str(uploaded_file_id) +
        "] Splitting PDF File into " + str(inputpdf.numPages) + " Pages")
    for i in xrange(inputpdf.numPages):
        output = PdfFileWriter()
        output.addPage(inputpdf.getPage(i))
        if not os.path.exists(config.store_split_pdfs):
            os.makedirs(config.store_split_pdfs)
        with open(
                config.store_split_pdfs +
                file_name.replace(config.store_main_uploads, "") +
                "%s.pdf" % i, "wb") as outputStream:
            # with open(config.store_split_pdfs+file_name.replace(config.store_main_uploads,"")+"%s.pdf" % i, "wb") as outputStream:
            output.write(outputStream)

        new_file = PdfFileReader(
            open(
                config.store_split_pdfs +
                file_name.replace(config.store_main_uploads, "") +
                "%s.pdf" % i, 'rb'))
        file_width = new_file.getPage(0).mediaBox.getWidth()
        file_height = new_file.getPage(0).mediaBox.getHeight()

        orientation = ""

        if file_width > file_height:
            orientation = "Landscape"
        else:
            orientation = "Portrait"

        start_thumbnail_generation = time.time()
        image_width, thumbnail_image_name, thumbnail_id = convert_single_file.convert_pdf_to_png(
            config.store_split_pdfs +
            file_name.replace(config.store_main_uploads, "") + "%s.pdf" % i,
            orientation, "PDF", uploaded_file_id, file_width, file_height)
        end_thumbnail_generation = time.time()

        total_time = str(
            round(end_thumbnail_generation - start_thumbnail_generation,
                  config.time_digit_precision))

        log_dictionary['all_steps'] += '[' + str(
            i
        ) + '] Thumbnail Generation Complete. Added Details to \'thumbnail\' table. Time: ' + total_time + '\n'
        print(
            "[" + str(uploaded_file_id) + ',' + str(i) +
            '] Thumbnail Generation Complete. Added Details to \'thumbnail\' table. Time: '
            + total_time)
        config.write_to_file(
            config.log_file_name, "[" + str(uploaded_file_id) + ',' + str(i) +
            '] Thumbnail Generation Complete. Added Details to \'thumbnail\' table. Time: '
            + total_time)
        log_dictionary[
            'all_steps'] += "Finding existing images to compare with\n"

        start_thumbnail_comparison = time.time()
        checkVariable, similarThumbnailID = compareCompatibleFiles.compare(
            thumbnail_image_name, uploaded_file_id, str(i), file_width,
            file_height)
        end_thumbnail_comparison = time.time()

        total_time = str(
            round(end_thumbnail_comparison - start_thumbnail_comparison,
                  config.time_digit_precision))

        log_dictionary['all_steps'] += '[' + str(
            i
        ) + '] Thumbnails Compared. Comparison Details added to \'thumb_comparison\' table. Time: ' + total_time + '\n'
        print(
            "[" + str(uploaded_file_id) + ',' + str(i) +
            '] Thumbnails Compared. Comparison Details added to \'thumb_comparison\' table. Time: '
            + total_time)
        config.write_to_file(
            config.log_file_name, "[" + str(uploaded_file_id) + ',' + str(i) +
            '] Thumbnails Compared. Comparison Details added to \'thumb_comparison\' table. Time: '
            + total_time)
        if checkVariable == "True":
            log_dictionary = convert_to_high_res_png.convert(
                config.store_split_pdfs +
                file_name.replace(config.store_main_uploads, "") +
                "%s.pdf" % i, file_width, file_height, i, file_name, file_hash,
                execution_start_time, log_dictionary, uploaded_file_id,
                time.time(), thumbnail_id)
        else:
            log_dictionary['all_steps'] += '[' + str(
                i
            ) + '] Thumbnail matches with Thumbnail ID: ' + similarThumbnailID + '\n'
            # log_dictionary['page'+str(i)]['time'] = str(time.time() - execution_start_time)

        if config.keep_split_pdf_pages == 0:
            os.remove(config.store_split_pdfs +
                      file_name.replace(config.store_main_uploads, "") +
                      "%s.pdf" % i)
    log_dictionary['total_time'] = str(time.time() - execution_start_time)
    sql_query = "UPDATE upload_file SET log = '" + (str(
        json.dumps(log_dictionary)).replace('"', '\\"')).replace(
            "'",
            "\\'") + "' WHERE hash = '" + file_hash + "' AND processed = '1'"
    db = mysql.connector.Connect(**login_info)
    cursor = db.cursor()
    cursor.execute(sql_query)
    db.commit()
    cursor.close()
Example #12
def start_stream():
    global darkice
    global darkice_stderr_queue
    global darkice_stdout_queue
    values = request.json

    alsacap = subprocess.Popen('alsacap -R',
                               shell=True,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    output, error = alsacap.communicate()

    found_usb = False
    channels = None
    sampling_rate = None
    for line in output.splitlines():
        print line
        if found_usb and 'channel' in line and 'sampling rate' in line:
            print "line:"
            print line.strip()
            channels, sampling_rate = parse_card_info_string(line.strip())
            break
        if 'USB' in line:  # start capturing data
            print "start capturing data"
            found_usb = True

    print(channels)
    print(sampling_rate)

    if channels and sampling_rate:
        darkice_config['audio'].channel.value = max(
            channels)  # todo fix this max/min
        darkice_config['audio'].sampleRate.value = max(
            sampling_rate)  # todo fix this max/min

    darkice_config['servers'][0].name.value = unicode(values['name'])
    darkice_config['servers'][0].genre.value = unicode(values['genre'])
    darkice_config['servers'][0].url.value = unicode(values['url'])
    darkice_config['servers'][0].description.value = unicode(
        values['description'])

    print app_config['recording_folder']
    print unicode(values['name'])

    darkice_config['servers'][0].localDumpFile.value = path.join(
        app_config['recording_folder'], unicode(values['name'] + '.mp3'))

    try:
        with codecs.open('test.cfg', mode='wb',
                         encoding='utf-8') as config_file:
            config.write_to_file(config_file, darkice_config['general'],
                                 darkice_config['audio'],
                                 darkice_config['servers'])
            filename = config_file.name
            print filename
    except IOError as e:
        print("there is an error")
        return {'error': 'File not availabe: {}'.format(e)}

    darkice = subprocess.Popen('sudo darkice -c {}'.format(filename),
                               stderr=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               shell=True)

    # non blocking reading of stdout and stderr for darkice status
    thread_output = Thread(target=read_nonblocking_output,
                           args=(darkice.stdout, darkice_stdout_queue))
    thread_output.daemon = True
    thread_output.start()
    thread_error = Thread(target=read_nonblocking_error,
                          args=(darkice.stderr, darkice_stderr_queue))
    thread_error.daemon = True
    thread_error.start()

    return get_stream_status()