def upgrade():
    with session_scope(MYCODO_DB_PATH) as conditional_sess:
        for each_conditional in conditional_sess.query(Conditional).all():
            try:
                indented_code = textwrap.indent(
                    each_conditional.conditional_statement, ' ' * 8)

                cond_statement_run = pre_statement_run + indented_code
                cond_statement_run = cond_statement_replace(cond_statement_run)

                assure_path_exists(PATH_PYTHON_CODE_USER)
                file_run = '{}/conditional_{}.py'.format(
                    PATH_PYTHON_CODE_USER, each_conditional.unique_id)
                with open(file_run, 'w') as fw:
                    fw.write('{}\n'.format(cond_statement_run))
                    fw.close()
            except Exception as msg:
                print("Exception: {}".format(msg))

    # Inputs
    with session_scope(MYCODO_DB_PATH) as input_sess:
        for each_input in input_sess.query(Input).all():
            if each_input.device == 'PythonCode' and each_input.cmd_command:
                try:
                    execute_at_creation(each_input.unique_id,
                                        each_input.cmd_command,
                                        None)
                except Exception as msg:
                    print("Exception: {}".format(msg))
Example #2
    def save_output_python_pwm_code(self, unique_id):
        """Save python PWM code to files"""
        pre_statement_run = f"""import os
import sys
sys.path.append(os.path.abspath('/var/mycodo-root'))
from mycodo.mycodo_client import DaemonControl
control = DaemonControl()
output_id = '{unique_id}'

class OutputRun:
    def __init__(self, logger, output_id):
        self.logger = logger
        self.output_id = output_id
        self.variables = {{}}
        self.running = True
        self.duty_cycle = None

    def stop_output(self):
        self.running = False

    def output_code_run(self, duty_cycle):
"""

        code_replaced = self.options_channels['pwm_command'][0].replace(
            '((duty_cycle))', 'duty_cycle')
        indented_code = textwrap.indent(code_replaced, ' ' * 8)
        full_command_pwm = pre_statement_run + indented_code

        assure_path_exists(PATH_PYTHON_CODE_USER)
        file_run = '{}/output_pwm_{}.py'.format(PATH_PYTHON_CODE_USER,
                                                unique_id)
        with open(file_run, 'w') as fw:
            fw.write('{}\n'.format(full_command_pwm))
            fw.close()
        set_user_grp(file_run, 'mycodo', 'mycodo')
Example #3
def export_influxdb(form):
    """
    Save the Mycodo InfluxDB database in the Enterprise-compatible format, zip
    archive it, and serve it to the user.
    """
    action = '{action} {controller}'.format(
        action=TRANSLATIONS['export']['title'],
        controller=TRANSLATIONS['measurement']['title'])
    error = []

    try:
        influx_backup_dir = os.path.join(INSTALL_DIRECTORY, 'influx_backup')

        # Delete influxdb directory if it exists
        if os.path.isdir(influx_backup_dir):
            shutil.rmtree(influx_backup_dir)

        # Create new directory (make sure it's empty)
        assure_path_exists(influx_backup_dir)

        cmd = "/usr/bin/influxd backup -database {db} -portable {path}".format(
            db=INFLUXDB_DATABASE, path=influx_backup_dir)
        _, _, status = cmd_output(cmd)

        influxd_version_out, _, _ = cmd_output('/usr/bin/influxd version')
        if influxd_version_out:
            influxd_version = influxd_version_out.decode('utf-8').split(' ')[1]
        else:
            influxd_version = None
            error.append("Could not determine Influxdb version")

        if not status and influxd_version:
            # Zip all files in the influx_backup directory
            data = io.BytesIO()
            with zipfile.ZipFile(data, mode='w') as z:
                for _, _, files in os.walk(influx_backup_dir):
                    for filename in files:
                        z.write(os.path.join(influx_backup_dir, filename),
                                filename)
            data.seek(0)

            # Delete influxdb directory if it exists
            if os.path.isdir(influx_backup_dir):
                shutil.rmtree(influx_backup_dir)

            # Send zip file to user
            return send_file(
                data,
                mimetype='application/zip',
                as_attachment=True,
                attachment_filename='Mycodo_{mv}_Influxdb_{iv}_{host}_{dt}.zip'
                .format(
                    mv=MYCODO_VERSION,
                    iv=influxd_version,
                    host=socket.gethostname().replace(' ', ''),
                    dt=datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")))
    except Exception as err:
        error.append("Error: {}".format(err))

    flash_success_errors(error, action, url_for('routes_page.page_export'))
Example #4
def export_influxdb(form):
    """
    Save the InfluxDB metastore and mycodo_db database to a zip file and serve
    it to the user
    """
    action = '{action} {controller}'.format(
        action=TRANSLATIONS['export']['title'],
        controller=TRANSLATIONS['measurement']['title'])
    error = []

    try:
        influx_backup_dir = os.path.join(INSTALL_DIRECTORY, 'influx_backup')

        # Delete influxdb directory if it exists
        if os.path.isdir(influx_backup_dir):
            shutil.rmtree(influx_backup_dir)

        # Create new directory (make sure it's empty)
        assure_path_exists(influx_backup_dir)

        cmd = "/usr/bin/influxd backup -database mycodo_db {path}".format(
            path=influx_backup_dir)
        _, _, status = cmd_output(cmd)

        influxd_version_out, _, _ = cmd_output(
            '/usr/bin/influxd version')
        if influxd_version_out:
            influxd_version = influxd_version_out.decode('utf-8').split(' ')[1]
        else:
            influxd_version = None
            error.append("Could not determine Influxdb version")

        if not status and influxd_version:
            # Zip all files in the influx_backup directory
            data = io.BytesIO()
            with zipfile.ZipFile(data, mode='w') as z:
                for _, _, files in os.walk(influx_backup_dir):
                    for filename in files:
                        z.write(os.path.join(influx_backup_dir, filename),
                                filename)
            data.seek(0)

            # Delete influxdb directory if it exists
            if os.path.isdir(influx_backup_dir):
                shutil.rmtree(influx_backup_dir)

            # Send zip file to user
            return send_file(
                data,
                mimetype='application/zip',
                as_attachment=True,
                attachment_filename='Mycodo_{mv}_Influxdb_{iv}_{host}_{dt}.zip'.format(
                     mv=MYCODO_VERSION, iv=influxd_version,
                     host=socket.gethostname().replace(' ', ''),
                     dt=datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
            )
    except Exception as err:
        error.append("Error: {}".format(err))

    flash_success_errors(error, action, url_for('routes_page.page_export'))
Example #5
def generate_relay_usage_report():
    """
    Generate relay usage report in a csv file

    """
    logger.debug("Generating relay usage report...")
    try:
        assure_path_exists(USAGE_REPORTS_PATH)

        misc = db_retrieve_table_daemon(Misc, entry='first')
        relay = db_retrieve_table_daemon(Relay)
        relay_usage = return_relay_usage(misc, relay.all())

        timestamp = time.strftime("%Y-%m-%d_%H-%M")
        file_name = 'relay_usage_report_{ts}.csv'.format(ts=timestamp)
        report_path_file = os.path.join(USAGE_REPORTS_PATH, file_name)

        with open(report_path_file, 'wb') as f:
            w = csv.writer(f)
            # Header row
            w.writerow([
                'Relay ID', 'Relay Unique ID', 'Relay Name', 'Type',
                'Past Day', 'Past Week', 'Past Month',
                'Past Month (from {})'.format(misc.relay_usage_dayofmonth),
                'Past Year'
            ])
            for key, value in relay_usage.items():
                if key in ['total_duration', 'total_cost', 'total_kwh']:
                    # Totals rows
                    w.writerow([
                        '', '', '', key, value['1d'], value['1w'], value['1m'],
                        value['1m_date'], value['1y']
                    ])
                else:
                    # Each relay rows
                    each_relay = relay.filter(Relay.id == key).first()
                    w.writerow([
                        each_relay.id, each_relay.unique_id, each_relay.name,
                        'hours_on', value['1d']['hours_on'],
                        value['1w']['hours_on'], value['1m']['hours_on'],
                        value['1m_date']['hours_on'], value['1y']['hours_on']
                    ])
                    w.writerow([
                        each_relay.id, each_relay.unique_id, each_relay.name,
                        'kwh', value['1d']['kwh'], value['1w']['kwh'],
                        value['1m']['kwh'], value['1m_date']['kwh'],
                        value['1y']['kwh']
                    ])
                    w.writerow([
                        each_relay.id, each_relay.unique_id, each_relay.name,
                        'cost', value['1d']['cost'], value['1w']['cost'],
                        value['1m']['cost'], value['1m_date']['cost'],
                        value['1y']['cost']
                    ])

        set_user_grp(report_path_file, 'mycodo', 'mycodo')
    except Exception:
        logger.exception("Relay Usage Report Generation ERROR")
Example #6
def save_conditional_code(error, cond_statement, unique_id, test=False):
    indented_code = textwrap.indent(
        cond_statement, ' ' * 8)

    cond_statement_run = pre_statement_run + indented_code
    cond_statement_run = cond_statement_replace(cond_statement_run)

    assure_path_exists(PATH_PYTHON_CODE_USER)
    file_run = '{}/conditional_{}.py'.format(
        PATH_PYTHON_CODE_USER, unique_id)
    with open(file_run, 'w') as fw:
        fw.write('{}\n'.format(cond_statement_run))
        fw.close()
    set_user_grp(file_run, 'mycodo', 'mycodo')

    if len(cond_statement_run.splitlines()) > 999:
        error.append("Too many lines in code. Reduce code to less than 1000 lines.")

    if test:
        lines_code = ''
        for line_num, each_line in enumerate(cond_statement_run.splitlines(), 1):
            if len(str(line_num)) == 3:
                line_spacing = ''
            elif len(str(line_num)) == 2:
                line_spacing = ' '
            else:
                line_spacing = '  '
            lines_code += '{sp}{ln}: {line}\n'.format(
                sp=line_spacing,
                ln=line_num,
                line=each_line)

        cmd_test = 'export PYTHONPATH=$PYTHONPATH:/var/mycodo-root && ' \
                   'pylint3 -d I,W0621,C0103,C0111,C0301,C0327,C0410,C0413 {path}'.format(
            path=file_run)
        cmd_out, _, cmd_status = cmd_output(cmd_test)

        message = Markup(
            '<pre>\n\n'
            'Full Conditional Statement code:\n\n{code}\n\n'
            'Conditional Statement code analysis:\n\n{report}'
            '</pre>'.format(
                code=lines_code, report=cmd_out.decode("utf-8")))
        if cmd_status:
            flash('Error(s) were found while evaluating your code. Review '
                  'the error(s), below, and fix them before activating your '
                  'Conditional.', 'error')
            flash(message, 'error')
        else:
            flash(
                "No errors were found while evaluating your code. However, "
                "this doesn't mean your code will perform as expected. "
                "Review your code for issues and test your Conditional "
                "before putting it into a production environment.", 'success')
            flash(message, 'success')

    return error
Example #7
def create_python_file(python_code_run, filename):
    assure_path_exists(PATH_PYTHON_CODE_USER)
    file_run = os.path.join(PATH_PYTHON_CODE_USER, filename)
    with open(file_run, 'w') as fw:
        fw.write('{}\n'.format(python_code_run))
        fw.close()
    set_user_grp(file_run, 'mycodo', 'mycodo')

    return python_code_run, file_run
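For orientation, below is a minimal, hypothetical usage sketch (not taken from the Mycodo source); it assumes the create_python_file() helper above and a pre_statement_run preamble like those shown in the other examples are in scope.

# Hypothetical usage sketch: wrap stand-in user code with a generated
# preamble, then persist it as an importable module file.
import textwrap

user_code = "self.logger.info('user code ran')"  # stand-in for user-supplied code
python_code_run = pre_statement_run + textwrap.indent(user_code, ' ' * 8)
code_str, file_path = create_python_file(python_code_run, 'input_python_code_test.py')
print("Saved {} lines to {}".format(len(code_str.splitlines()), file_path))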
Example #8
def note_mod(form):
    action = '{action} {controller}'.format(
        action=TRANSLATIONS['modify']['title'],
        controller=TRANSLATIONS['note']['title'])
    error = []
    list_tags = []

    mod_note = Notes.query.filter(
        Notes.unique_id == form.note_unique_id.data).first()

    if not form.name.data:
        error.append("Name cannot be left blank")
    if not form.note_tags.data:
        error.append("At least one tag must be selected")
    if not form.note.data:
        error.append("Note cannot be left blank")

    try:
        for each_tag in form.note_tags.data:
            check_tag = NoteTags.query.filter(
                NoteTags.unique_id == each_tag).first()
            if not check_tag:
                error.append("Invalid tag: {}".format(each_tag))
            else:
                list_tags.append(check_tag.unique_id)
    except Exception as msg:
        error.append("Invalid tag format: {}".format(msg))

    try:
        mod_note.date_time = datetime_time_to_utc(form.date_time.data)
    except:
        error.append("Error while parsing date/time")

    if form.files.data:
        assure_path_exists(PATH_NOTE_ATTACHMENTS)
        if mod_note.files:
            filename_list = mod_note.files.split(",")
        else:
            filename_list = []
        for each_file in form.files.raw_data:
            file_name = "{pre}_{name}".format(pre=mod_note.unique_id,
                                              name=each_file.filename)
            file_save_path = os.path.join(PATH_NOTE_ATTACHMENTS, file_name)
            each_file.save(file_save_path)
            filename_list.append(file_name)
        mod_note.files = ",".join(filename_list)

    if not error:
        mod_note.name = form.name.data
        mod_note.tags = ",".join(list_tags)
        mod_note.note = form.note.data
        db.session.commit()

    flash_success_errors(error, action, url_for('routes_page.page_notes'))
Example #9
def save_conditional_code(error,
                          cond_statement,
                          unique_id,
                          table_conditions_all,
                          table_actions_all,
                          test=False):
    lines_code = None
    cmd_status = None
    cmd_out = None

    try:
        indented_code = textwrap.indent(cond_statement, ' ' * 8)

        cond_statement_run = pre_statement_run + indented_code
        cond_statement_run = cond_statement_replace(cond_statement_run,
                                                    table_conditions_all,
                                                    table_actions_all)

        assure_path_exists(PATH_PYTHON_CODE_USER)
        file_run = '{}/conditional_{}.py'.format(PATH_PYTHON_CODE_USER,
                                                 unique_id)
        with open(file_run, 'w') as fw:
            fw.write('{}\n'.format(cond_statement_run))
            fw.close()
        set_user_grp(file_run, 'mycodo', 'mycodo')

        if len(cond_statement_run.splitlines()) > 999:
            error.append(
                "Too many lines in code. Reduce code to less than 1000 lines.")

        if test:
            lines_code = ''
            for line_num, each_line in enumerate(
                    cond_statement_run.splitlines(), 1):
                if len(str(line_num)) == 3:
                    line_spacing = ''
                elif len(str(line_num)) == 2:
                    line_spacing = ' '
                else:
                    line_spacing = '  '
                lines_code += '{sp}{ln}: {line}\n'.format(sp=line_spacing,
                                                          ln=line_num,
                                                          line=each_line)

            cmd_test = 'mkdir -p /var/mycodo-root/.pylint.d && ' \
                       'export PYTHONPATH=$PYTHONPATH:/var/mycodo-root && ' \
                       'export PYLINTHOME=/var/mycodo-root/.pylint.d && ' \
                       'pylint3 -d I,W0621,C0103,C0111,C0301,C0327,C0410,C0413,R0912,R0914,R0915 {path}'.format(
                           path=file_run)
            cmd_out, _, cmd_status = cmd_output(cmd_test)
    except Exception as err:
        error.append("Error saving/testing conditional code: {}".format(err))

    return error, lines_code, cmd_status, cmd_out
Example #10
def note_mod(form):
    action = '{action} {controller}'.format(
        action=TRANSLATIONS['modify']['title'],
        controller=TRANSLATIONS['note']['title'])
    error = []
    list_tags = []

    mod_note = Notes.query.filter(
        Notes.unique_id == form.note_unique_id.data).first()

    if not form.name.data:
        error.append("Name cannot be left blank")
    if not form.note_tags.data:
        error.append("At least one tag must be selected")
    if not form.note.data:
        error.append("Note cannot be left blank")

    try:
        for each_tag in form.note_tags.data:
            check_tag = NoteTags.query.filter(
                NoteTags.unique_id == each_tag).first()
            if not check_tag:
                error.append("Invalid tag: {}".format(each_tag))
            else:
                list_tags.append(check_tag.unique_id)
    except Exception as msg:
        error.append("Invalid tag format: {}".format(msg))

    try:
        mod_note.date_time = datetime_time_to_utc(form.date_time.data)
    except:
        error.append("Error while parsing date/time")

    if form.files.data:
        assure_path_exists(PATH_NOTE_ATTACHMENTS)
        if mod_note.files:
            filename_list = mod_note.files.split(",")
        else:
            filename_list = []
        for each_file in form.files.raw_data:
            file_name = "{pre}_{name}".format(
                pre=mod_note.unique_id, name=each_file.filename)
            file_save_path = os.path.join(PATH_NOTE_ATTACHMENTS, file_name)
            each_file.save(file_save_path)
            filename_list.append(file_name)
        mod_note.files = ",".join(filename_list)

    if not error:
        mod_note.name = form.name.data
        mod_note.tags = ",".join(list_tags)
        mod_note.note = form.note.data
        db.session.commit()

    flash_success_errors(error, action, url_for('routes_page.page_notes'))
Example #11
def generate_thermal_image_from_timestamp(unique_id, timestamp):
    """Return a file from the note attachment directory"""
    ts_now = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    camera_path = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=unique_id)))
    filename = 'Still-{uid}-{ts}.jpg'.format(
        uid=unique_id,
        ts=ts_now).replace(" ", "_")
    save_path = assure_path_exists(os.path.join(camera_path, 'thermal'))
    assure_path_exists(save_path)
    path_file = os.path.join(save_path, filename)

    dbcon = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)

    input_dev = Input.query.filter(Input.unique_id == unique_id).first()
    pixels = []
    success = True

    start = int(int(timestamp) / 1000.0)  # Round down
    end = start + 1  # Round up

    start_timestamp = time.strftime('%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(start))
    end_timestamp = time.strftime('%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(end))

    for each_channel in range(input_dev.channels):
        measurement = 'channel_{chan}'.format(
            chan=each_channel)
        query_str = query_string(measurement, unique_id,
                                 start_str=start_timestamp,
                                 end_str=end_timestamp)
        if query_str == 1:
            logger.error('Invalid query string')
            success = False
        else:
            raw_data = dbcon.query(query_str).raw
            if not raw_data or 'series' not in raw_data or not raw_data['series']:
                logger.error('No measurements to export in this time period')
                success = False
            else:
                pixels.append(raw_data['series'][0]['values'][0][1])

    # logger.error("generate_thermal_image_from_timestamp: success: {}, pixels: {}".format(success, pixels))

    if success:
        generate_thermal_image_from_pixels(pixels, 8, 8, path_file)
        return send_file(path_file, mimetype='image/jpeg')
    else:
        return "Could not generate image"
Example #12
def generate_thermal_image_from_timestamp(unique_id, timestamp):
    """Return a file from the note attachment directory"""
    ts_now = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    camera_path = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=unique_id)))
    filename = 'Still-{uid}-{ts}.jpg'.format(
        uid=unique_id,
        ts=ts_now).replace(" ", "_")
    save_path = assure_path_exists(os.path.join(camera_path, 'thermal'))
    assure_path_exists(save_path)
    path_file = os.path.join(save_path, filename)

    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    current_app.config['INFLUXDB_TIMEOUT'] = 5
    dbcon = influx_db.connection

    input_dev = Input.query.filter(Input.unique_id == unique_id).first()
    pixels = []
    success = True

    start = int(int(timestamp) / 1000.0)  # Round down
    end = start + 1  # Round up

    start_timestamp = time.strftime('%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(start))
    end_timestamp = time.strftime('%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(end))

    for each_channel in range(input_dev.channels):
        measurement = 'channel_{chan}'.format(
            chan=each_channel)
        query_str = query_string(measurement, unique_id,
                                 start_str=start_timestamp,
                                 end_str=end_timestamp)
        if query_str == 1:
            logger.error('Invalid query string')
            success = False
        else:
            raw_data = dbcon.query(query_str).raw
            if not raw_data or 'series' not in raw_data:
                logger.error('No measurements to export in this time period')
                success = False
            else:
                pixels.append(raw_data['series'][0]['values'][0][1])

    # logger.error("generate_thermal_image_from_timestamp: success: {}, pixels: {}".format(success, pixels))

    if success:
        generate_thermal_image_from_pixels(pixels, 8, 8, path_file)
        return send_file(path_file, mimetype='image/jpeg')
    else:
        return "Could not generate image"
Example #13
    def create_settings_backup(self, filename):
        path_save = os.path.join(PATH_SETTINGS_BACKUP, filename)
        assure_path_exists(PATH_SETTINGS_BACKUP)
        if os.path.exists(path_save):
            self.logger.debug(
                "Skipping backup of settings: "
                "File already exists: {}".format(path_save))
        else:
            status, saved_path = create_settings_export(save_path=path_save)
            if not status:
                self.logger.debug("Saved settings file: "
                                  "{}".format(saved_path))
            else:
                self.logger.debug("Could not create settings file: "
                                  "{}".format(saved_path))
Example #14
def camera_img_return_path(camera_unique_id, img_type, filename):
    """Return an image from stills or time-lapses"""
    camera = Camera.query.filter(Camera.unique_id == camera_unique_id).first()
    camera_path = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=camera.unique_id)))
    if img_type == 'still':
        if camera.path_still:
            path = camera.path_still
        else:
            path = os.path.join(camera_path, img_type)
    elif img_type == 'timelapse':
        if camera.path_timelapse:
            path = camera.path_timelapse
        else:
            path = os.path.join(camera_path, img_type)
    else:
        return "Unknown Image Type"

    if os.path.isdir(path):
        files = (files for files in os.listdir(path)
                 if os.path.isfile(os.path.join(path, files)))
    else:
        files = []
    if filename in files:
        path_file = os.path.join(path, filename)
        return send_file(path_file, mimetype='image/jpeg')

    return "Image not found"
Example #15
def camera_timelapse_video(form_camera):
    action = "Generate Timelapse Video"
    error = []

    if not os.path.exists("/usr/bin/ffmpeg"):
        error.append(
            "ffmpeg not found. Install with 'sudo apt install ffmpeg'")

    if not error:
        try:
            camera = db_retrieve_table(Camera,
                                       unique_id=form_camera.camera_id.data)
            camera_path = assure_path_exists(
                os.path.join(PATH_CAMERAS,
                             '{uid}'.format(uid=camera.unique_id)))
            timelapse_path = assure_path_exists(
                os.path.join(camera_path, 'timelapse'))
            video_path = assure_path_exists(
                os.path.join(camera_path, 'timelapse_video'))
            timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
            path_file = os.path.join(
                video_path, "Video_{name}_{ts}.mp4".format(
                    name=form_camera.timelapse_image_set.data, ts=timestamp))

            cmd =  "/usr/bin/ffmpeg " \
                   "-f image2 " \
                   "-r {fps} " \
                   "-i {path}/{seq}-%05d.jpg " \
                   "-vcodec {codec} " \
                   "-y {save}".format(
                        seq=form_camera.timelapse_image_set.data,
                        fps=form_camera.timelapse_fps.data,
                        path=timelapse_path,
                        codec=form_camera.timelapse_codec.data,
                        save=path_file)
            subprocess.Popen(cmd, shell=True)
            flash(
                "The time-lapse video is being generated in the background with the command:\n"
                "{}".format(cmd), "success")
            flash("The video will be saved at "
                  "{}".format(path_file), "success")
        except Exception as except_msg:
            error.append(except_msg)

    flash_success_errors(error, action, url_for('routes_page.page_camera'))
Example #16
def save_conditional_code():
    with session_scope(MYCODO_DB_PATH) as conditional_sess:
        for each_conditional in conditional_sess.query(Conditional).all():
            try:
                indented_code = textwrap.indent(
                    each_conditional.conditional_statement, ' ' * 8)

                cond_statement_run = pre_statement_run + indented_code
                cond_statement_run = cond_statement_replace(cond_statement_run)

                assure_path_exists(PATH_PYTHON_CODE_USER)
                file_run = '{}/conditional_{}.py'.format(
                    PATH_PYTHON_CODE_USER, each_conditional.unique_id)
                with open(file_run, 'w') as fw:
                    fw.write('{}\n'.format(cond_statement_run))
                    fw.close()
            except Exception as msg:
                print("Exception: {}".format(msg))
Example #17
def generate_code(unique_id, new_input):
    error = []
    pre_statement_run = """import os
import sys
sys.path.append(os.path.abspath('/var/mycodo-root'))
from mycodo.mycodo_client import DaemonControl
from mycodo.utils.influx import add_measurements_influxdb
control = DaemonControl()

class PythonInputRun:
    def __init__(self, logger, input_id, measurement_info):
        self.logger = logger
        self.input_id = input_id
        self.measurement_info = measurement_info

    def store_measurement(self, channel=None, measurement=None, timestamp=None):
        if None in [channel, measurement]:
            return
        measure = {channel: {}}
        measure[channel]['measurement'] = self.measurement_info[channel]['measurement']
        measure[channel]['unit'] = self.measurement_info[channel]['unit']
        measure[channel]['value'] = measurement
        if timestamp:
            measure[channel]['timestamp_utc'] = timestamp
        add_measurements_influxdb(self.input_id, measure)

    def python_code_run(self):
"""
    indented_code = textwrap.indent(new_input.cmd_command, ' ' * 8)
    input_python_code_run = pre_statement_run + indented_code

    assure_path_exists(PATH_PYTHON_CODE_USER)
    file_run = '{}/input_python_code_{}.py'.format(
        PATH_PYTHON_CODE_USER, unique_id)
    with open(file_run, 'w') as fw:
        fw.write('{}\n'.format(input_python_code_run))
        fw.close()
    set_user_grp(file_run, 'mycodo', 'mycodo')

    for each_error in error:
        flash(each_error, 'error')

    return input_python_code_run, file_run
Example #18
def camera_timelapse_video(form_camera):
    messages = {"success": [], "info": [], "warning": [], "error": []}

    if not os.path.exists("/usr/bin/ffmpeg"):
        messages["error"].append(
            "ffmpeg not found. Install with 'sudo apt install ffmpeg'")

    if not messages["error"]:
        try:
            camera = db_retrieve_table(Camera,
                                       unique_id=form_camera.camera_id.data)
            camera_path = assure_path_exists(
                os.path.join(PATH_CAMERAS,
                             '{uid}'.format(uid=camera.unique_id)))
            timelapse_path = assure_path_exists(
                os.path.join(camera_path, 'timelapse'))
            video_path = assure_path_exists(
                os.path.join(camera_path, 'timelapse_video'))
            timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
            path_file = os.path.join(
                video_path, "Video_{name}_{ts}.mp4".format(
                    name=form_camera.timelapse_image_set.data, ts=timestamp))

            cmd =  "/usr/bin/ffmpeg " \
                   "-f image2 " \
                   "-r {fps} " \
                   "-i {path}/{seq}-%05d.jpg " \
                   "-vcodec {codec} " \
                   "-y {save}".format(
                        seq=form_camera.timelapse_image_set.data,
                        fps=form_camera.timelapse_fps.data,
                        path=timelapse_path,
                        codec=form_camera.timelapse_codec.data,
                        save=path_file)
            subprocess.Popen(cmd, shell=True)
            messages["success"].append(
                "The time-lapse video is being generated in the background with the command: {}."
                " The video will be saved at {}".format(cmd, path_file))
        except Exception as except_msg:
            messages["error"].append(except_msg)

    return messages
Example #19
    def save_output_python_code(self, unique_id):
        """Save python code to files"""
        pre_statement_run = f"""import os
import sys
sys.path.append(os.path.abspath('/var/mycodo-root'))
from mycodo.mycodo_client import DaemonControl
control = DaemonControl()
output_id = '{unique_id}'

class OutputRun:
    def __init__(self, logger, output_id):
        self.logger = logger
        self.output_id = output_id
        self.variables = {{}}
        self.running = True

    def stop_output(self):
        self.running = False

    def output_code_run(self):
"""

        code_on_indented = textwrap.indent(self.on_command, ' ' * 8)
        full_command_on = pre_statement_run + code_on_indented

        code_off_indented = textwrap.indent(self.off_command, ' ' * 8)
        full_command_off = pre_statement_run + code_off_indented

        assure_path_exists(PATH_PYTHON_CODE_USER)
        file_run = '{}/output_on_{}.py'.format(PATH_PYTHON_CODE_USER,
                                               unique_id)
        with open(file_run, 'w') as fw:
            fw.write('{}\n'.format(full_command_on))
            fw.close()
        set_user_grp(file_run, 'mycodo', 'mycodo')

        file_run = '{}/output_off_{}.py'.format(PATH_PYTHON_CODE_USER,
                                                unique_id)
        with open(file_run, 'w') as fw:
            fw.write('{}\n'.format(full_command_off))
            fw.close()
        set_user_grp(file_run, 'mycodo', 'mycodo')
Example #20
    def get(self, unique_id, img_type):
        """get last camera image."""
        if not utils_general.user_has_permission('view_camera'):
            abort(403)

        camera = Camera.query.filter(Camera.unique_id == unique_id).first()

        if not camera:
            abort(422, custom='No camera with ID found')
        if img_type not in ["still", "timelapse"]:
            abort(422, custom='Type not "still" or "timelapse"')

        (latest_img_still_ts, latest_img_still_size, latest_img_still,
         latest_img_tl_ts, latest_img_tl_size, latest_img_tl,
         time_lapse_imgs) = utils_general.get_camera_image_info()

        camera_path = assure_path_exists(os.path.join(PATH_CAMERAS, unique_id))

        path = ""
        filename = ""
        if img_type == 'still':
            filename = latest_img_still[unique_id]
            if camera.path_still:
                path = camera.path_still
            else:
                path = os.path.join(camera_path, img_type)
        elif img_type == 'timelapse':
            filename = latest_img_tl[unique_id]
            if camera.path_timelapse:
                path = camera.path_timelapse
            else:
                path = os.path.join(camera_path, img_type)
        else:
            abort(422, custom=f'Unknown image type: {img_type}')

        if path and os.path.isdir(path):
            files = (files for files in os.listdir(path)
                     if os.path.isfile(os.path.join(path, files)))
        else:
            files = []

        try:
            if filename and filename in files:
                path_file = os.path.join(path, filename)
                if os.path.abspath(path_file).startswith(path):
                    return send_file(path_file, mimetype='image/jpeg')

            return abort(500)
        except Exception:
            abort(500,
                  message='An exception occurred',
                  error=traceback.format_exc())
Example #21
    def backup_measurements(self):
        influxd_version_out, _, _ = cmd_output('/usr/bin/influxd version')
        if influxd_version_out:
            influxd_version = influxd_version_out.decode('utf-8').split(' ')[1]
        else:
            influxd_version = "UNKNOWN"
        filename = 'Mycodo_{mv}_Influxdb_{iv}_{host}_{dt}.zip'.format(
            mv=MYCODO_VERSION,
            iv=influxd_version,
            host=socket.gethostname().replace(' ', ''),
            dt=datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
        path_save = os.path.join(PATH_MEASUREMENTS_BACKUP, filename)
        assure_path_exists(PATH_MEASUREMENTS_BACKUP)
        status, saved_path = create_measurements_export(save_path=path_save)
        if not status:
            self.logger.debug("Saved measurements file: "
                              "{}".format(saved_path))
        else:
            self.logger.debug("Could not create measurements file: "
                              "{}".format(saved_path))

        if self.backup_remove_measurements_archives:
            remove_files = "--remove-source-files "
        else:
            remove_files = ""
        rsync_cmd = "rsync {rem}-avz -e 'ssh -p {port}' {path_local} {user}@{host}:{remote_path}".format(
            rem=remove_files,
            port=self.ssh_port,
            path_local=PATH_MEASUREMENTS_BACKUP,
            user=self.remote_user,
            host=self.remote_host,
            remote_path=self.remote_backup_path)
        self.logger.debug("rsync command: {}".format(rsync_cmd))
        cmd_out, cmd_err, cmd_status = cmd_output(rsync_cmd,
                                                  timeout=self.rsync_timeout,
                                                  user=self.local_user)
        self.logger.debug(
            "rsync returned:\nOut: {}\nError: {}\nStatus: {}".format(
                cmd_out.decode(), cmd_err.decode(), cmd_status))
Example #22
def create_measurements_export(save_path=None):
    try:
        data = io.BytesIO()
        influx_backup_dir = os.path.join(INSTALL_DIRECTORY, 'influx_backup')

        # Delete influxdb directory if it exists
        if os.path.isdir(influx_backup_dir):
            shutil.rmtree(influx_backup_dir)

        # Create new directory (make sure it's empty)
        assure_path_exists(influx_backup_dir)

        cmd = "/usr/bin/influxd backup -database {db} -portable {path}".format(
            db=INFLUXDB_DATABASE, path=influx_backup_dir)
        _, _, status = cmd_output(cmd)

        if not status:
            # Zip all files in the influx_backup directory
            with zipfile.ZipFile(data, mode='w') as z:
                for _, _, files in os.walk(influx_backup_dir):
                    for filename in files:
                        z.write(os.path.join(influx_backup_dir, filename),
                                filename)
            data.seek(0)

            # Delete influxdb directory if it exists
            if os.path.isdir(influx_backup_dir):
                shutil.rmtree(influx_backup_dir)

            if save_path:
                with open(save_path, "wb") as f:
                    f.write(data.getbuffer())
                set_user_grp(save_path, 'mycodo', 'mycodo')
                return 0, save_path
            else:
                return 0, data
    except Exception as err:
        logger.error("Error: {}".format(err))
        return 1, err
Example #23
    def backup_settings(self):
        filename = 'Mycodo_{mver}_Settings_{aver}_{host}_{dt}.zip'.format(
            mver=MYCODO_VERSION,
            aver=ALEMBIC_VERSION,
            host=socket.gethostname().replace(' ', ''),
            dt=datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
        path_save = os.path.join(PATH_SETTINGS_BACKUP, filename)
        assure_path_exists(PATH_SETTINGS_BACKUP)
        if os.path.exists(path_save):
            self.logger.debug("Skipping backup of settings: "
                              "File already exists: {}".format(path_save))
        else:
            status, saved_path = create_settings_export(save_path=path_save)
            if not status:
                self.logger.debug("Saved settings file: "
                                  "{}".format(saved_path))
            else:
                self.logger.debug("Could not create settings file: "
                                  "{}".format(saved_path))

        if self.backup_remove_settings_archives:
            remove_files = "--remove-source-files "
        else:
            remove_files = ""
        rsync_cmd = "rsync {rem}-avz -e 'ssh -p {port}' {path_local} {user}@{host}:{remote_path}".format(
            rem=remove_files,
            port=self.ssh_port,
            path_local=PATH_SETTINGS_BACKUP,
            user=self.remote_user,
            host=self.remote_host,
            remote_path=self.remote_backup_path)
        self.logger.debug("rsync command: {}".format(rsync_cmd))
        cmd_out, cmd_err, cmd_status = cmd_output(rsync_cmd,
                                                  timeout=self.rsync_timeout,
                                                  user=self.local_user)
        self.logger.debug(
            "rsync returned:\nOut: {}\nError: {}\nStatus: {}".format(
                cmd_out.decode(), cmd_err.decode(), cmd_status))
Example #24
def setup_profiler(app):
    """
    Set up a profiler
    Outputs to file and stream
    See profile_analyzer.py in Mycodo/mycodo/scripts/
    """
    app.config['PROFILE'] = True
    new = 'profile-{dt:%Y-%m-%d_%H:%M:%S}'.format(dt=datetime.datetime.now())
    profile_path = assure_path_exists(os.path.join(INSTALL_DIRECTORY, new))
    profile_log = os.path.join(profile_path, 'profile.log')
    profile_log_file = open(profile_log, 'w')
    stream = MergeStream(sys.stdout, profile_log_file)
    app.wsgi_app = ProfilerMiddleware(app.wsgi_app, stream, restrictions=[30])
    return app
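A hypothetical call site for this helper, assuming a Flask application factory named create_app (the factory is an illustrative assumption, not part of the example above):

# Hypothetical usage sketch: wrap a Flask app with the profiling middleware
# so each request's slowest calls are written to profile.log and stdout.
from flask import Flask

def create_app():
    return Flask(__name__)

app = setup_profiler(create_app())  # setup_profiler as defined in the example above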
Example #25
def setup_profiler(app):
    """
    Set up a profiler
    Outputs to file and stream
    See profile_analyzer.py in Mycodo/mycodo/scripts/
    """
    from werkzeug.contrib.profiler import MergeStream
    from werkzeug.contrib.profiler import ProfilerMiddleware
    app.config['PROFILE'] = True
    new = 'profile-{dt:%Y-%m-%d_%H:%M:%S}'.format(
        dt=datetime.datetime.now())
    profile_path = assure_path_exists(os.path.join(INSTALL_DIRECTORY, new))
    profile_log = os.path.join(profile_path, 'profile.log')
    profile_log_file = open(profile_log, 'w')
    stream = MergeStream(sys.stdout, profile_log_file)
    app.wsgi_app = ProfilerMiddleware(app.wsgi_app, stream, restrictions=[30])
    return app
Example #26
def camera_img_return_path(camera_unique_id, img_type, filename):
    """Return an image from stills or timelapses"""
    camera = Camera.query.filter(Camera.unique_id == camera_unique_id).first()
    camera_path = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=camera.unique_id)))

    if img_type in ['still', 'timelapse']:
        path = os.path.join(camera_path, img_type)
        if os.path.isdir(path):
            files = (files for files in os.listdir(path)
                if os.path.isfile(os.path.join(path, files)))
        else:
            files = []
        if filename in files:
            path_file = os.path.join(path, filename)
            return send_file(path_file, mimetype='image/jpeg')

    return "Image not found"
Example #27
def camera_img_latest_timelapse(camera_unique_id, max_age):
    """Capture an image and resturn the filename"""
    _, _, tl_ts, tl_path = utils_general.get_camera_image_info()
    if camera_unique_id in tl_path and tl_path[camera_unique_id]:
        camera_path = assure_path_exists(
            os.path.join(PATH_CAMERAS, '{uid}/timelapse'.format(
                uid=camera_unique_id)))
        image_path_full = os.path.join(camera_path, tl_path[camera_unique_id])
        try:
            timestamp = os.path.getctime(image_path_full)
            time_max_age = datetime.datetime.now() - datetime.timedelta(seconds=int(max_age))
            if datetime.datetime.fromtimestamp(timestamp) > time_max_age:
                return_values = '["{}","{}"]'.format(tl_path[camera_unique_id],
                                                     tl_ts[camera_unique_id])
            else:
                return_values = '["max_age_exceeded"]'
        except OSError:
            return_values = '["file_not_found"]'
    else:
        return_values = '["file_not_found"]'
    return Response(return_values, mimetype='text/json')
Example #28
def camera_img_latest_timelapse(camera_unique_id, max_age):
    """Capture an image and/or return a filename"""
    _, _, tl_ts, tl_path, _ = utils_general.get_camera_image_info()
    if camera_unique_id in tl_path and tl_path[camera_unique_id]:
        camera_path = assure_path_exists(
            os.path.join(PATH_CAMERAS, '{uid}/timelapse'.format(
                uid=camera_unique_id)))
        image_path_full = os.path.join(camera_path, tl_path[camera_unique_id])
        try:
            timestamp = os.path.getctime(image_path_full)
            time_max_age = datetime.datetime.now() - datetime.timedelta(seconds=int(max_age))
            if datetime.datetime.fromtimestamp(timestamp) > time_max_age:
                return_values = '["{}","{}"]'.format(tl_path[camera_unique_id],
                                                     tl_ts[camera_unique_id])
            else:
                return_values = '["max_age_exceeded"]'
        except OSError:
            return_values = '["file_not_found"]'
    else:
        return_values = '["file_not_found"]'
    return Response(return_values, mimetype='text/json')
Example #29
    def get_measurement(self):
        """Gets the AMG8833's measurements."""
        if not self.sensor:
            self.logger.error(
                "Error 101: Device not set up. See https://kizniche.github.io/Mycodo/Error-Codes#error-101 for more info."
            )
            return

        self.return_dict = copy.deepcopy(measurements_dict)

        pixels = self.sensor.readPixels()

        if self.report:
            self.logger.error("Min Pixel = {0} C".format(min(pixels)))
            self.logger.error("Max Pixel = {0} C".format(max(pixels)))
            self.logger.error("Thermistor = {0} C".format(
                self.sensor.readThermistor()))

        for channel in self.channels_measurement:
            self.value_set(channel, pixels[channel])

        if self.save_image:
            timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
            assure_path_exists(PATH_CAMERAS)
            camera_path = assure_path_exists(
                os.path.join(PATH_CAMERAS,
                             '{uid}'.format(uid=self.input_dev.unique_id)))
            filename = 'Still-{uid}-{ts}.jpg'.format(
                uid=self.input_dev.unique_id, ts=timestamp).replace(" ", "_")
            save_path = assure_path_exists(os.path.join(
                camera_path, 'thermal'))
            assure_path_exists(save_path)
            path_file = os.path.join(save_path, filename)
            generate_thermal_image_from_pixels(pixels,
                                               self.nx,
                                               self.ny,
                                               path_file,
                                               scale=self.scale,
                                               temp_min=self.temp_min,
                                               temp_max=self.temp_max)

        return self.return_dict
Example #30
    def get_measurement(self):
        """ Gets the AMG8833's measurements """
        self.return_dict = measurements_dict.copy()

        pixels = self.sensor.readPixels()

        if self.report:
            self.logger.error("Min Pixel = {0} C".format(min(pixels)))
            self.logger.error("Max Pixel = {0} C".format(max(pixels)))
            self.logger.error("Thermistor = {0} C".format(
                self.sensor.readThermistor()))

        for channel in self.channels_measurement:
            if self.is_enabled(channel):
                self.value_set(channel, pixels[channel])

        if self.save_image:
            timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
            assure_path_exists(PATH_CAMERAS)
            camera_path = assure_path_exists(
                os.path.join(PATH_CAMERAS,
                             '{uid}'.format(uid=self.input_dev.unique_id)))
            filename = 'Still-{uid}-{ts}.jpg'.format(
                uid=self.input_dev.unique_id, ts=timestamp).replace(" ", "_")
            save_path = assure_path_exists(os.path.join(
                camera_path, 'thermal'))
            assure_path_exists(save_path)
            path_file = os.path.join(save_path, filename)
            generate_thermal_image_from_pixels(pixels,
                                               self.nx,
                                               self.ny,
                                               path_file,
                                               scale=self.scale,
                                               temp_min=self.temp_min,
                                               temp_max=self.temp_max)

        return self.return_dict
Example #31
    def get_measurement(self):
        """ Gets the AMG8833's measurements """
        return_dict = measurements_dict.copy()

        pixels = self.sensor.readPixels()

        if self.report:
            self.logger.error("Min Pixel = {0} C".format(min(pixels)))
            self.logger.error("Max Pixel = {0} C".format(max(pixels)))
            self.logger.error("Thermistor = {0} C".format(self.sensor.readThermistor()))

        for meas in self.device_measurements.all():
            if meas.is_enabled:
                return_dict[meas.channel]['value'] = pixels[meas.channel]

        if self.save_image:
            timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
            assure_path_exists(PATH_CAMERAS)
            camera_path = assure_path_exists(
                os.path.join(PATH_CAMERAS, '{uid}'.format(uid=self.input_dev.unique_id)))
            filename = 'Still-{uid}-{ts}.jpg'.format(
                uid=self.input_dev.unique_id,
                ts=timestamp).replace(" ", "_")
            save_path = assure_path_exists(os.path.join(camera_path, 'thermal'))
            assure_path_exists(save_path)
            path_file = os.path.join(save_path, filename)
            generate_thermal_image_from_pixels(
                pixels,
                self.nx,
                self.ny,
                path_file,
                scale=self.scale,
                temp_min=self.temp_min,
                temp_max=self.temp_max)

        return return_dict
Example #32
def import_influxdb(form):
    """
    Receive a zip file contatining influx metastore and database that was
    exported with export_influxdb(), then import the metastore and database
    in InfluxDB.
    """
    action = '{action} {controller}'.format(
        action=TRANSLATIONS['import']['title'],
        controller="Influxdb")
    error = []

    try:
        correct_format = 'Mycodo_MYCODOVERSION_Influxdb_INFLUXVERSION_HOST_DATETIME.zip'
        upload_folder = os.path.join(INSTALL_DIRECTORY, 'upload')
        tmp_folder = os.path.join(upload_folder, 'mycodo_influx_tmp')
        full_path = None

        if not form.influxdb_import_file.data:
            error.append('No file present')
        elif form.influxdb_import_file.data.filename == '':
            error.append('No file name')
        else:
            # Split the uploaded file into parts
            file_name = form.influxdb_import_file.data.filename
            name = file_name.rsplit('.', 1)[0]
            extension = file_name.rsplit('.', 1)[1].lower()
            name_split = name.split('_')

            # Split the correctly-formatted filename into parts
            correct_name = correct_format.rsplit('.', 1)[0]
            correct_name_1 = correct_name.split('_')[0]
            correct_name_2 = correct_name.split('_')[2]
            correct_extension = correct_format.rsplit('.', 1)[1].lower()

            # Compare the uploaded filename parts to the correct parts
            try:
                if name_split[0] != correct_name_1:
                    error.append(
                        "Invalid file name: {n}: {fn} != {cn}.".format(
                            n=file_name,
                            fn=name_split[0],
                            cn=correct_name_1))
                    error.append("Correct format is: {fmt}".format(
                        fmt=correct_format))
                elif name_split[2] != correct_name_2:
                    error.append(
                        "Invalid file name: {n}: {fn} != {cn}".format(
                            n=file_name,
                            fn=name_split[2],
                            cn=correct_name_2))
                    error.append("Correct format is: {fmt}".format(
                        fmt=correct_format))
                elif extension != correct_extension:
                    error.append("Extension not 'zip'")
            except Exception as err:
                error.append(
                    "Exception while verifying file name: "
                    "{err}".format(err=err))

        if not error:
            # Save file to upload directory
            filename = secure_filename(
                form.influxdb_import_file.data.filename)
            full_path = os.path.join(tmp_folder, filename)
            assure_path_exists(upload_folder)
            assure_path_exists(tmp_folder)
            form.influxdb_import_file.data.save(
                os.path.join(tmp_folder, filename))

            # Check if contents of zip file are correct
            try:
                file_list = zipfile.ZipFile(full_path, 'r').namelist()
                if not any("meta." in s for s in file_list):
                    error.append(
                        "Metastore not found: No 'meta.*' files found "
                        "in archive")
                elif not any("mycodo_db.autogen." in s for s in file_list):
                    error.append(
                        "Databases not found: No 'mycodo_db.autogen.*' "
                        "files found in archive")
            except Exception as err:
                error.append("Exception while opening zip file: "
                             "{err}".format(err=err))

        if not error:
            # Unzip file
            try:
                zip_ref = zipfile.ZipFile(full_path, 'r')
                zip_ref.extractall(tmp_folder)
                zip_ref.close()
            except Exception as err:
                error.append("Exception while extracting zip file: "
                             "{err}".format(err=err))

        if not error:
            try:
                # Stop influxdb and Mycodo daemon (backend) from
                # running (influxdb must be stopped to restore database)
                cmd = "{pth}/mycodo/scripts/mycodo_wrapper " \
                      "daemon_stop".format(
                    pth=INSTALL_DIRECTORY)
                out, _, _ = cmd_output(cmd)

                cmd = "{pth}/mycodo/scripts/mycodo_wrapper " \
                      "influxdb_stop".format(
                    pth=INSTALL_DIRECTORY)
                out, _, _ = cmd_output(cmd)

                # Import the metastore and database
                output_successes = []
                cmd = "{pth}/mycodo/scripts/mycodo_wrapper " \
                      "influxdb_restore_metastore {dir}".format(
                    pth=INSTALL_DIRECTORY, dir=tmp_folder)
                out, _, _ = cmd_output(cmd)
                if out:
                    output_successes.append(out.decode('utf-8'))

                cmd = "{pth}/mycodo/scripts/mycodo_wrapper " \
                      "influxdb_restore_database {dir}".format(
                    pth=INSTALL_DIRECTORY, dir=tmp_folder)
                out, _, _ = cmd_output(cmd)
                if out:
                    output_successes.append(out.decode('utf-8'))

                # Start influxdb and Mycodo daemon (backend)
                cmd = "{pth}/mycodo/scripts/mycodo_wrapper " \
                      "influxdb_start".format(
                    pth=INSTALL_DIRECTORY)
                out, _, _ = cmd_output(cmd)

                time.sleep(2)

                cmd = "{pth}/mycodo/scripts/mycodo_wrapper " \
                      "daemon_start".format(
                    pth=INSTALL_DIRECTORY)
                out, _, _ = cmd_output(cmd)

                # Delete tmp directory if it exists
                if os.path.isdir(tmp_folder):
                    shutil.rmtree(tmp_folder)

                if all(output_successes):  # Success!
                    output_successes.append(
                        "InfluxDB metastore and database successfully "
                        "imported")
                    return output_successes
            except Exception as err:
                error.append(
                    "Exception while importing metastore and database: "
                    "{err}".format(err=err))

    except Exception as err:
        error.append("Exception: {}".format(err))

    flash_success_errors(error, action, url_for('routes_page.page_export'))
Example #33
def import_settings(form):
    """
    Receive a zip file containing a Mycodo settings database that was
    exported with export_settings(), then back up the current Mycodo settings
    database and implement the one form the zip in its's place.
    """
    action = '{action} {controller}'.format(
        action=TRANSLATIONS['import']['title'],
        controller=TRANSLATIONS['settings']['title'])
    error = []

    try:
        correct_format = 'Mycodo_MYCODOVERSION_Settings_DBVERSION_HOST_DATETIME.zip'
        upload_folder = os.path.join(INSTALL_DIRECTORY, 'upload')
        tmp_folder = os.path.join(upload_folder, 'mycodo_db_tmp')
        mycodo_database_name = 'mycodo.db'
        full_path = None

        if not form.settings_import_file.data:
            error.append('No file present')
        elif form.settings_import_file.data.filename == '':
            error.append('No file name')
        else:
            # Split the uploaded file into parts
            file_name = form.settings_import_file.data.filename
            name = file_name.rsplit('.', 1)[0]
            extension = file_name.rsplit('.', 1)[1].lower()
            name_split = name.split('_')

            # Split the correctly-formatted filename into parts
            correct_name = correct_format.rsplit('.', 1)[0]
            correct_name_1 = correct_name.split('_')[0]
            correct_name_2 = correct_name.split('_')[2]
            correct_extension = correct_format.rsplit('.', 1)[1].lower()

            # Compare the uploaded filename parts to the correct parts
            try:
                if name_split[0] != correct_name_1:
                    error.append(
                        "Invalid file name: {n}: {fn} != {cn}.".format(
                            n=file_name,
                            fn=name_split[0],
                            cn=correct_name_1))
                    error.append("Correct format is: {fmt}".format(
                        fmt=correct_format))
                elif name_split[2] != correct_name_2:
                    error.append(
                        "Invalid file name: {n}: {fn} != {cn}".format(
                            n=file_name,
                            fn=name_split[2],
                            cn=correct_name_2))
                    error.append("Correct format is: {fmt}".format(
                        fmt=correct_format))
                elif extension != correct_extension:
                    error.append("Extension not 'zip'")
                elif name_split[1] != MYCODO_VERSION:
                    error.append("Invalid Mycodo version: {fv} != {mv}. "
                                 "This database can only be imported to "
                                 "Mycodo version {mver}".format(
                        fv=name_split[1],
                        mv=MYCODO_VERSION,
                        mver=name_split[1]))
                elif name_split[3] != ALEMBIC_VERSION:
                    error.append("Invalid database version: {fv} != {dv}."
                                 " This database can only be imported to"
                                 " Mycodo version {mver}".format(
                        fv=name_split[3],
                        dv=ALEMBIC_VERSION,
                        mver=name_split[1]))
            except Exception as err:
                error.append(
                    "Exception while verifying file name: {err}".format(err=err))

        if not error:
            # Save file to upload directory
            filename = secure_filename(
                form.settings_import_file.data.filename)
            full_path = os.path.join(tmp_folder, filename)
            assure_path_exists(upload_folder)
            assure_path_exists(tmp_folder)
            form.settings_import_file.data.save(
                os.path.join(tmp_folder, filename))

            # Check if contents of zip file are correct
            try:
                file_list = zipfile.ZipFile(full_path, 'r').namelist()
                if len(file_list) > 1:
                    error.append("Incorrect number of files in zip: "
                                 "{an} != 1".format(an=len(file_list)))
                elif file_list[0] != mycodo_database_name:
                    error.append("Incorrect file in zip: {af} != {cf}".format(
                        af=file_list[0], cf=mycodo_database_name))
            except Exception as err:
                error.append("Exception while opening zip file: "
                             "{err}".format(err=err))

        if not error:
            # Unzip file
            try:
                zip_ref = zipfile.ZipFile(full_path, 'r')
                zip_ref.extractall(tmp_folder)
                zip_ref.close()
            except Exception as err:
                error.append("Exception while extracting zip file: "
                             "{err}".format(err=err))

        if not error:
            try:
                # Stop Mycodo daemon (backend)
                cmd = "{pth}/mycodo/scripts/mycodo_wrapper " \
                      "daemon_stop".format(
                        pth=INSTALL_DIRECTORY)
                _, _, _ = cmd_output(cmd)

                # Backup current database and replace with extracted mycodo.db
                imported_database = os.path.join(
                    tmp_folder, mycodo_database_name)
                backup_name = (
                        SQL_DATABASE_MYCODO + '.backup_' +
                        datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
                os.rename(SQL_DATABASE_MYCODO, backup_name)
                os.rename(imported_database, SQL_DATABASE_MYCODO)

                # Start Mycodo daemon (backend)
                cmd = "{pth}/mycodo/scripts/mycodo_wrapper " \
                      "daemon_start".format(
                        pth=INSTALL_DIRECTORY)
                _, _, _ = cmd_output(cmd)

                # Delete tmp directory if it exists
                if os.path.isdir(tmp_folder):
                    shutil.rmtree(tmp_folder)

                return backup_name  # Success!
            except Exception as err:
                error.append("Exception while replacing database: "
                             "{err}".format(err=err))

    except Exception as err:
        error.append("Exception: {}".format(err))

    flash_success_errors(error, action, url_for('routes_page.page_export'))
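
The filename checks above compare fixed fields of the uploaded name against the template 'Mycodo_MYCODOVERSION_Settings_DBVERSION_HOST_DATETIME.zip'. A small self-contained sketch of the same parsing idea (the function name and field labels are chosen here for illustration only):

import os

def parse_settings_archive_name(file_name):
    """Sketch: split an uploaded settings archive name into its expected fields."""
    name, extension = os.path.splitext(file_name)
    parts = name.split('_')
    if extension.lower() != '.zip' or len(parts) < 6:
        return None  # does not match the expected template
    return {
        'prefix': parts[0],           # expected to be 'Mycodo'
        'mycodo_version': parts[1],
        'kind': parts[2],             # expected to be 'Settings'
        'db_version': parts[3],
        'host': parts[4],
        'datetime': '_'.join(parts[5:]),
    }

# parse_settings_archive_name('Mycodo_8.0.0_Settings_abc123_myhost_2020-01-01_12-00-00.zip')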
Example #34
def camera_record(record_type, unique_id, duration_sec=None, tmp_filename=None):
    """
    Record a still image, time-lapse image, or video from a camera
    :param record_type: 'photo', 'timelapse', or 'video'
    :param unique_id: unique ID of the camera to use
    :param duration_sec: video duration, in seconds
    :param tmp_filename: optional filename to use in place of the generated one
    :return: (save_path, filename) on success
    """
    daemon_control = None
    settings = db_retrieve_table_daemon(Camera, unique_id=unique_id)
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    assure_path_exists(PATH_CAMERAS)
    camera_path = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=settings.unique_id)))
    if record_type == 'photo':
        if settings.path_still != '':
            save_path = settings.path_still
        else:
            save_path = assure_path_exists(os.path.join(camera_path, 'still'))
        filename = 'Still-{cam_id}-{cam}-{ts}.jpg'.format(
            cam_id=settings.id,
            cam=settings.name,
            ts=timestamp).replace(" ", "_")
    elif record_type == 'timelapse':
        if settings.path_timelapse != '':
            save_path = settings.path_timelapse
        else:
            save_path = assure_path_exists(os.path.join(camera_path, 'timelapse'))
        start = datetime.datetime.fromtimestamp(
            settings.timelapse_start_time).strftime("%Y-%m-%d_%H-%M-%S")
        filename = 'Timelapse-{cam_id}-{cam}-{st}-img-{cn:05d}.jpg'.format(
            cam_id=settings.id,
            cam=settings.name,
            st=start,
            cn=settings.timelapse_capture_number).replace(" ", "_")
    elif record_type == 'video':
        if settings.path_video != '':
            save_path = settings.path_video
        else:
            save_path = assure_path_exists(os.path.join(camera_path, 'video'))
        filename = 'Video-{cam}-{ts}.h264'.format(
            cam=settings.name,
            ts=timestamp).replace(" ", "_")
    else:
        return

    assure_path_exists(save_path)

    if tmp_filename:
        filename = tmp_filename

    path_file = os.path.join(save_path, filename)

    # Turn on output, if configured
    if settings.output_id:
        daemon_control = DaemonControl()
        daemon_control.output_on(settings.output_id)

    # Pause while the output remains on for the specified duration.
    # Used for instance to allow fluorescent lights to fully turn on before
    # capturing an image.
    if settings.output_duration:
        time.sleep(settings.output_duration)

    if settings.library == 'picamera':
        # Try 5 times to access the pi camera (in case another process is accessing it)
        for _ in range(5):
            try:
                with picamera.PiCamera() as camera:
                    camera.resolution = (settings.width, settings.height)
                    camera.hflip = settings.hflip
                    camera.vflip = settings.vflip
                    camera.rotation = settings.rotation
                    camera.brightness = int(settings.brightness)
                    camera.contrast = int(settings.contrast)
                    camera.exposure_compensation = int(settings.exposure)
                    camera.saturation = int(settings.saturation)
                    camera.start_preview()
                    time.sleep(2)  # Camera warm-up time

                    if record_type in ['photo', 'timelapse']:
                        camera.capture(path_file, use_video_port=False)
                    elif record_type == 'video':
                        camera.start_recording(path_file, format='h264', quality=20)
                        camera.wait_recording(duration_sec)
                        camera.stop_recording()
                    else:
                        return
                    break
            except picamera.exc.PiCameraMMALError:
                logger.error("The camera is already open by picamera. Retrying 4 times.")
            time.sleep(1)

    elif settings.library == 'fswebcam':
        cmd = "/usr/bin/fswebcam --device {dev} --resolution {w}x{h} --set brightness={bt}% " \
              "--no-banner --save {file}".format(dev=settings.device,
                                                 w=settings.width,
                                                 h=settings.height,
                                                 bt=settings.brightness,
                                                 file=path_file)
        if settings.hflip:
            cmd += " --flip h"
        if settings.vflip:
            cmd += " --flip h"
        if settings.rotation:
            cmd += " --rotate {angle}".format(angle=settings.rotation)
        if settings.custom_options:
            cmd += " " + settings.custom_options

        out, err, status = cmd_output(cmd, stdout_pipe=False)
        # logger.error("TEST01: {}; {}; {}; {}".format(cmd, out, err, status))

    # Turn off output, if configured
    if settings.output_id and daemon_control:
        daemon_control.output_off(settings.output_id)

    try:
        set_user_grp(path_file, 'mycodo', 'mycodo')
        return save_path, filename
    except Exception as e:
        logger.exception(
            "Exception raised in 'camera_record' when setting user grp: "
            "{err}".format(err=e))
Example #35
def import_notes(form):
    """
    Receive a zip file containing a CSV file and note attachments
    """
    action = '{action} {controller}'.format(
        action=gettext("Import"),
        controller=TRANSLATIONS['note']['title'])
    error = []

    upload_folder = os.path.join(INSTALL_DIRECTORY, 'upload')
    tmp_folder = os.path.join(upload_folder, 'mycodo_notes_tmp')
    full_path = None

    try:
        if not form.notes_import_file.data:
            error.append('No file present')
        elif form.notes_import_file.data.filename == '':
            error.append('No file name')

        if not error:
            # Save file to upload directory
            filename = secure_filename(
                form.notes_import_file.data.filename)
            full_path = os.path.join(tmp_folder, filename)
            assure_path_exists(upload_folder)
            assure_path_exists(tmp_folder)
            form.notes_import_file.data.save(
                os.path.join(tmp_folder, filename))

            # Unzip file
            try:
                zip_ref = zipfile.ZipFile(full_path, 'r')
                zip_ref.extractall(tmp_folder)
                zip_ref.close()
            except Exception as err:
                logger.exception(err)
                error.append("Exception while extracting zip file: "
                             "{err}".format(err=err))

        if not error:
            found_csv = False
            for each_file in os.listdir(tmp_folder):
                if each_file.endswith('_notes_exported.csv') and not found_csv:
                    found_csv = True
                    count_notes = 0
                    count_notes_skipped = 0
                    count_attach = 0
                    logger.error(each_file)

                    file_csv = os.path.join(tmp_folder, each_file)
                    path_attachments = os.path.join(tmp_folder, 'attachments')

                    with open(file_csv, 'r') as the_file:
                        reader = csv.DictReader(the_file)
                        for line in reader:
                            if not Notes.query.filter(Notes.unique_id == line['UUID']).count():
                                count_notes += 1

                                new_note = Notes()
                                new_note.unique_id = line['UUID']
                                new_note.date_time = datetime.strptime(line['Time'], '%Y-%m-%d %H:%M:%S')
                                new_note.name = line['Name']
                                new_note.note = line['Note']

                                tag_ids = []
                                tags = {}
                                for each_tag in line['Tags'].split(';'):
                                    tags[each_tag.split(',')[0]] = each_tag.split(',')[1]
                                    tag_ids.append(each_tag.split(',')[0])

                                for each_tag_id, each_tag_name in tags.items():
                                    if (not NoteTags.query.filter(NoteTags.unique_id == each_tag_id).count() and
                                            not NoteTags.query.filter(NoteTags.name == each_tag_name).count()):
                                        new_tag = NoteTags()
                                        new_tag.unique_id = each_tag_id
                                        new_tag.name = each_tag_name
                                        new_tag.save()

                                    elif (not NoteTags.query.filter(NoteTags.unique_id == each_tag_id).count() and
                                            NoteTags.query.filter(NoteTags.name == each_tag_name).count()):
                                        new_tag = NoteTags()
                                        new_tag.unique_id = each_tag_id
                                        new_tag.name = each_tag_name + str(uuid.uuid4())[:8]
                                        new_tag.save()

                                new_note.tags = ','.join(tag_ids)
                                new_note.files = line['Files']
                                new_note.save()

                                for each_file in line['Files'].split(','):
                                    count_attach += 1
                                    os.rename(os.path.join(path_attachments, each_file),
                                              os.path.join(PATH_NOTE_ATTACHMENTS, each_file))
                            else:
                                count_notes_skipped += 1

                    if (count_notes + count_attach) == 0:
                        error.append("0 imported, {notes} skipped".format(
                            notes=count_notes_skipped))
                    else:
                        flash("Imported {notes} notes and {attach} "
                              "attachments".format(notes=count_notes,
                                                   attach=count_attach),
                              "success")

            if not found_csv:
                error.append("Cannot import notes: Could not find CSV file in ZIP archive.")

    except Exception as err:
        error.append("Exception: {}".format(err))
    finally:
        if os.path.isdir(tmp_folder):
            shutil.rmtree(tmp_folder)  # Delete tmp directory

    flash_success_errors(error, action, url_for('routes_page.page_export'))
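
The 'Tags' column handled above stores each tag as a 'tag_id,tag_name' pair, with pairs separated by semicolons. A small sketch of that encoding, using made-up IDs and names:

# Encoding of the notes CSV 'Tags' column: "id,name;id,name;..."
tags_field = "1a2b3c4d,greenhouse;5e6f7a8b,watering"

tags = {}
tag_ids = []
for entry in tags_field.split(';'):
    tag_id, tag_name = entry.split(',')[0], entry.split(',')[1]
    tags[tag_id] = tag_name
    tag_ids.append(tag_id)

print(tags)     # {'1a2b3c4d': 'greenhouse', '5e6f7a8b': 'watering'}
print(tag_ids)  # ['1a2b3c4d', '5e6f7a8b']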
Example #36
def generate_output_usage_report():
    """
    Generate output usage report in a csv file

    """
    logger.debug("Generating output usage report...")
    try:
        assure_path_exists(USAGE_REPORTS_PATH)

        misc = db_retrieve_table_daemon(Misc, entry='first')
        output = db_retrieve_table_daemon(Output)
        output_usage = return_output_usage(misc, output.all())

        timestamp = time.strftime("%Y-%m-%d_%H-%M")
        file_name = 'output_usage_report_{ts}.csv'.format(ts=timestamp)
        report_path_file = os.path.join(USAGE_REPORTS_PATH, file_name)

        # Open in text mode with newline='' so csv.writer works correctly under Python 3
        with open(report_path_file, 'w', newline='') as f:
            w = csv.writer(f)
            # Header row
            w.writerow([
                 'Relay ID',
                 'Relay Unique ID',
                 'Relay Name',
                 'Type',
                 'Past Day',
                 'Past Week',
                 'Past Month',
                 'Past Month (from {})'.format(misc.output_usage_dayofmonth),
                 'Past Year'
            ])
            for key, value in output_usage.items():
                if key in ['total_duration', 'total_cost', 'total_kwh']:
                    # Totals rows
                    w.writerow(['', '', '',
                                key,
                                value['1d'],
                                value['1w'],
                                value['1m'],
                                value['1m_date'],
                                value['1y']])
                else:
                    # Each output rows
                    each_output = output.filter(Output.unique_id == key).first()
                    w.writerow([each_output.unique_id,
                                each_output.unique_id,
                                str(each_output.name),
                                'hours_on',
                                value['1d']['hours_on'],
                                value['1w']['hours_on'],
                                value['1m']['hours_on'],
                                value['1m_date']['hours_on'],
                                value['1y']['hours_on']])
                    w.writerow([each_output.unique_id,
                                each_output.unique_id,
                                str(each_output.name),
                                'kwh',
                                value['1d']['kwh'],
                                value['1w']['kwh'],
                                value['1m']['kwh'],
                                value['1m_date']['kwh'],
                                value['1y']['kwh']])
                    w.writerow([each_output.unique_id,
                                each_output.unique_id,
                                str(each_output.name),
                                'cost',
                                value['1d']['cost'],
                                value['1w']['cost'],
                                value['1m']['cost'],
                                value['1m_date']['cost'],
                                value['1y']['cost']])

        set_user_grp(report_path_file, 'mycodo', 'mycodo')
    except Exception:
        logger.exception("Energy Usage Report Generation ERROR")
Example #37
def camera_record(record_type,
                  unique_id,
                  duration_sec=None,
                  tmp_filename=None):
    """
    Record a still image, time-lapse image, or video from a camera
    :param record_type: 'photo', 'timelapse', or 'video'
    :param unique_id: unique ID of the camera to use
    :param duration_sec: video duration, in seconds
    :param tmp_filename: optional filename to use in place of the generated one
    :return: (save_path, filename) on success
    """
    daemon_control = None
    settings = db_retrieve_table_daemon(Camera, unique_id=unique_id)
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    assure_path_exists(PATH_CAMERAS)
    camera_path = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=settings.unique_id)))
    if record_type == 'photo':
        if settings.path_still:
            save_path = settings.path_still
        else:
            save_path = assure_path_exists(os.path.join(camera_path, 'still'))
        filename = 'Still-{cam_id}-{cam}-{ts}.jpg'.format(
            cam_id=settings.id, cam=settings.name,
            ts=timestamp).replace(" ", "_")
    elif record_type == 'timelapse':
        if settings.path_timelapse:
            save_path = settings.path_timelapse
        else:
            save_path = assure_path_exists(
                os.path.join(camera_path, 'timelapse'))
        start = datetime.datetime.fromtimestamp(
            settings.timelapse_start_time).strftime("%Y-%m-%d_%H-%M-%S")
        filename = 'Timelapse-{cam_id}-{cam}-{st}-img-{cn:05d}.jpg'.format(
            cam_id=settings.id,
            cam=settings.name,
            st=start,
            cn=settings.timelapse_capture_number).replace(" ", "_")
    elif record_type == 'video':
        if settings.path_video:
            save_path = settings.path_video
        else:
            save_path = assure_path_exists(os.path.join(camera_path, 'video'))
        filename = 'Video-{cam}-{ts}.h264'.format(cam=settings.name,
                                                  ts=timestamp).replace(
                                                      " ", "_")
    else:
        return

    assure_path_exists(save_path)

    if tmp_filename:
        filename = tmp_filename

    path_file = os.path.join(save_path, filename)

    # Turn on output, if configured
    output_already_on = False
    output_id = None
    output_channel_id = None
    output_channel = None
    if settings.output_id and ',' in settings.output_id:
        output_id = settings.output_id.split(",")[0]
        output_channel_id = settings.output_id.split(",")[1]
        output_channel = db_retrieve_table_daemon(OutputChannel,
                                                  unique_id=output_channel_id)

    if output_id and output_channel:
        daemon_control = DaemonControl()
        if daemon_control.output_state(
                output_id, output_channel=output_channel.channel) == "on":
            output_already_on = True
        else:
            daemon_control.output_on(output_id,
                                     output_channel=output_channel.channel)

    # Pause while the output remains on for the specified duration.
    # Used for instance to allow fluorescent lights to fully turn on before
    # capturing an image.
    if settings.output_duration:
        time.sleep(settings.output_duration)

    if settings.library == 'picamera':
        import picamera

        # Try 5 times to access the pi camera (in case another process is accessing it)
        for _ in range(5):
            try:
                with picamera.PiCamera() as camera:
                    camera.resolution = (settings.width, settings.height)
                    camera.hflip = settings.hflip
                    camera.vflip = settings.vflip
                    camera.rotation = settings.rotation
                    camera.brightness = int(settings.brightness)
                    camera.contrast = int(settings.contrast)
                    camera.exposure_compensation = int(settings.exposure)
                    camera.saturation = int(settings.saturation)
                    camera.shutter_speed = settings.picamera_shutter_speed
                    camera.sharpness = settings.picamera_sharpness
                    camera.iso = settings.picamera_iso
                    camera.awb_mode = settings.picamera_awb
                    if settings.picamera_awb == 'off':
                        camera.awb_gains = (settings.picamera_awb_gain_red,
                                            settings.picamera_awb_gain_blue)
                    camera.exposure_mode = settings.picamera_exposure_mode
                    camera.meter_mode = settings.picamera_meter_mode
                    camera.image_effect = settings.picamera_image_effect

                    camera.start_preview()
                    time.sleep(2)  # Camera warm-up time

                    if record_type in ['photo', 'timelapse']:
                        camera.capture(path_file, use_video_port=False)
                    elif record_type == 'video':
                        camera.start_recording(path_file,
                                               format='h264',
                                               quality=20)
                        camera.wait_recording(duration_sec)
                        camera.stop_recording()
                    else:
                        return
                    break
            except picamera.exc.PiCameraMMALError:
                logger.error(
                    "The camera is already open by picamera. Retrying 4 times."
                )
            time.sleep(1)

    elif settings.library == 'fswebcam':
        cmd = "/usr/bin/fswebcam --device {dev} --resolution {w}x{h} --set brightness={bt}% " \
              "--no-banner --save {file}".format(dev=settings.device,
                                                 w=settings.width,
                                                 h=settings.height,
                                                 bt=settings.brightness,
                                                 file=path_file)
        if settings.hflip:
            cmd += " --flip h"
        if settings.vflip:
            cmd += " --flip h"
        if settings.rotation:
            cmd += " --rotate {angle}".format(angle=settings.rotation)
        if settings.custom_options:
            cmd += " {}".format(settings.custom_options)

        out, err, status = cmd_output(cmd, stdout_pipe=False, user='******')
        logger.debug("Camera debug message: "
                     "cmd: {}; out: {}; error: {}; status: {}".format(
                         cmd, out, err, status))

    elif settings.library == 'opencv':
        import cv2
        import imutils

        cap = cv2.VideoCapture(settings.opencv_device)
        cap.set(cv2.CAP_PROP_FRAME_WIDTH, settings.width)
        cap.set(cv2.CAP_PROP_FRAME_HEIGHT, settings.height)
        cap.set(cv2.CAP_PROP_EXPOSURE, settings.exposure)
        cap.set(cv2.CAP_PROP_GAIN, settings.gain)
        cap.set(cv2.CAP_PROP_BRIGHTNESS, settings.brightness)
        cap.set(cv2.CAP_PROP_CONTRAST, settings.contrast)
        cap.set(cv2.CAP_PROP_HUE, settings.hue)
        cap.set(cv2.CAP_PROP_SATURATION, settings.saturation)

        # Check if image can be read
        status, _ = cap.read()
        if not status:
            logger.error("Cannot detect USB camera with device '{dev}'".format(
                dev=settings.opencv_device))
            return

        # Discard a few frames to allow camera to adjust to settings
        for _ in range(2):
            cap.read()

        if record_type in ['photo', 'timelapse']:
            edited = False
            status, img_orig = cap.read()
            cap.release()

            if not status:
                logger.error("Could not acquire image")
                return

            img_edited = img_orig.copy()

            if any((settings.hflip, settings.vflip, settings.rotation)):
                edited = True

            if settings.hflip and settings.vflip:
                img_edited = cv2.flip(img_orig, -1)
            elif settings.hflip:
                img_edited = cv2.flip(img_orig, 1)
            elif settings.vflip:
                img_edited = cv2.flip(img_orig, 0)

            if settings.rotation:
                img_edited = imutils.rotate_bound(img_orig, settings.rotation)

            if edited:
                cv2.imwrite(path_file, img_edited)
            else:
                cv2.imwrite(path_file, img_orig)

        elif record_type == 'video':
            # TODO: opencv video recording is currently not working. No idea why. Try to fix later.
            try:
                cap = cv2.VideoCapture(settings.opencv_device)
                fourcc = cv2.VideoWriter_fourcc(*'XVID')  # CV_FOURCC was removed in OpenCV 3+
                resolution = (settings.width, settings.height)
                out = cv2.VideoWriter(path_file, fourcc, 20.0, resolution)

                time_end = time.time() + duration_sec
                while cap.isOpened() and time.time() < time_end:
                    ret, frame = cap.read()
                    if ret:
                        # write the frame
                        out.write(frame)
                        if cv2.waitKey(1) & 0xFF == ord('q'):
                            break
                    else:
                        break
                cap.release()
                out.release()
                cv2.destroyAllWindows()
            except Exception as e:
                logger.exception("Exception raised while recording video: "
                                 "{err}".format(err=e))
        else:
            return

    elif settings.library == 'http_address':
        import cv2
        import imutils
        from urllib.error import HTTPError
        from urllib.parse import urlparse
        from urllib.request import urlretrieve

        if record_type in ['photo', 'timelapse']:
            path_tmp = "/tmp/tmpimg.jpg"

            # Get filename and extension, if available
            a = urlparse(settings.url_still)
            filename = os.path.basename(a.path)
            if filename:
                path_tmp = "/tmp/{}".format(filename)

            try:
                os.remove(path_tmp)
            except FileNotFoundError:
                pass

            try:
                urlretrieve(settings.url_still, path_tmp)
            except HTTPError as err:
                logger.error(err)
            except Exception as err:
                logger.exception(err)

            try:
                img_orig = cv2.imread(path_tmp)

                if img_orig is not None and img_orig.shape is not None:
                    if any(
                        (settings.hflip, settings.vflip, settings.rotation)):
                        img_edited = None
                        if settings.hflip and settings.vflip:
                            img_edited = cv2.flip(img_orig, -1)
                        elif settings.hflip:
                            img_edited = cv2.flip(img_orig, 1)
                        elif settings.vflip:
                            img_edited = cv2.flip(img_orig, 0)

                        if settings.rotation:
                            img_edited = imutils.rotate_bound(
                                img_orig, settings.rotation)

                        if img_edited is not None:
                            cv2.imwrite(path_file, img_edited)
                    else:
                        cv2.imwrite(path_file, img_orig)
                else:
                    os.rename(path_tmp, path_file)
            except Exception as err:
                logger.error(
                    "Could not convert, rotate, or invert image: {}".format(
                        err))
                try:
                    os.rename(path_tmp, path_file)
                except FileNotFoundError:
                    logger.error("Camera image not found")

        elif record_type == 'video':
            pass  # No video (yet)

    elif settings.library == 'http_address_requests':
        import cv2
        import imutils
        import requests

        if record_type in ['photo', 'timelapse']:
            path_tmp = "/tmp/tmpimg.jpg"
            try:
                os.remove(path_tmp)
            except FileNotFoundError:
                pass

            try:
                r = requests.get(settings.url_still)
                if r.status_code == 200:
                    open(path_tmp, 'wb').write(r.content)
                else:
                    logger.error(
                        "Could not download image. Status code: {}".format(
                            r.status_code))
            except requests.HTTPError as err:
                logger.error("HTTPError: {}".format(err))
            except Exception as err:
                logger.exception(err)

            try:
                img_orig = cv2.imread(path_tmp)

                if img_orig is not None and img_orig.shape is not None:
                    if any(
                        (settings.hflip, settings.vflip, settings.rotation)):
                        if settings.hflip and settings.vflip:
                            img_edited = cv2.flip(img_orig, -1)
                        elif settings.hflip:
                            img_edited = cv2.flip(img_orig, 1)
                        elif settings.vflip:
                            img_edited = cv2.flip(img_orig, 0)

                        if settings.rotation:
                            img_edited = imutils.rotate_bound(
                                img_orig, settings.rotation)

                        cv2.imwrite(path_file, img_edited)
                    else:
                        cv2.imwrite(path_file, img_orig)
                else:
                    os.rename(path_tmp, path_file)
            except Exception as err:
                logger.error(
                    "Could not convert, rotate, or invert image: {}".format(
                        err))
                try:
                    os.rename(path_tmp, path_file)
                except FileNotFoundError:
                    logger.error("Camera image not found")

        elif record_type == 'video':
            pass  # No video (yet)

    try:
        set_user_grp(path_file, 'mycodo', 'mycodo')
    except Exception as e:
        logger.exception(
            "Exception raised in 'camera_record' when setting user grp: "
            "{err}".format(err=e))

    # Turn off output, if configured
    if output_id and output_channel and daemon_control and not output_already_on:
        daemon_control.output_off(output_id,
                                  output_channel=output_channel.channel)

    try:
        set_user_grp(path_file, 'mycodo', 'mycodo')
        return save_path, filename
    except Exception as e:
        logger.exception(
            "Exception raised in 'camera_record' when setting user grp: "
            "{err}".format(err=e))
Example #38
def import_influxdb(form):
    """
    Receive a zip file containing an InfluxDB metastore and database that was
    exported with export_influxdb(), then import the metastore and database
    into InfluxDB.
    """
    action = '{action} {controller}'.format(
        action=TRANSLATIONS['import']['title'], controller="Influxdb")
    error = []

    try:
        correct_format = 'Mycodo_MYCODOVERSION_Influxdb_INFLUXVERSION_HOST_DATETIME.zip'
        upload_folder = os.path.join(INSTALL_DIRECTORY, 'upload')
        tmp_folder = os.path.join(upload_folder, 'mycodo_influx_tmp')
        full_path = None

        if not form.influxdb_import_file.data:
            error.append('No file present')
        elif form.influxdb_import_file.data.filename == '':
            error.append('No file name')
        else:
            # Split the uploaded file into parts
            file_name = form.influxdb_import_file.data.filename
            name = file_name.rsplit('.', 1)[0]
            extension = file_name.rsplit('.', 1)[1].lower()
            name_split = name.split('_')

            # Split the correctly-formatted filename into parts
            correct_name = correct_format.rsplit('.', 1)[0]
            correct_name_1 = correct_name.split('_')[0]
            correct_name_2 = correct_name.split('_')[2]
            correct_extension = correct_format.rsplit('.', 1)[1].lower()

            # Compare the uploaded filename parts to the correct parts
            try:
                if name_split[0] != correct_name_1:
                    error.append(
                        "Invalid file name: {n}: {fn} != {cn}.".format(
                            n=file_name, fn=name_split[0], cn=correct_name_1))
                    error.append(
                        "Correct format is: {fmt}".format(fmt=correct_format))
                elif name_split[2] != correct_name_2:
                    error.append("Invalid file name: {n}: {fn} != {cn}".format(
                        n=file_name, fn=name_split[2], cn=correct_name_2))
                    error.append(
                        "Correct format is: {fmt}".format(fmt=correct_format))
                elif extension != correct_extension:
                    error.append("Extension not 'zip'")
            except Exception as err:
                error.append("Exception while verifying file name: "
                             "{err}".format(err=err))

        if not error:
            # Save file to upload directory
            filename = secure_filename(form.influxdb_import_file.data.filename)
            full_path = os.path.join(tmp_folder, filename)
            assure_path_exists(tmp_folder)
            form.influxdb_import_file.data.save(
                os.path.join(tmp_folder, filename))

            # Check if contents of zip file are correct
            try:
                file_list = zipfile.ZipFile(full_path, 'r').namelist()
                if not any(".meta" in s for s in file_list):
                    error.append("No '.meta' file found in archive")
                elif not any(".manifest" in s for s in file_list):
                    error.append("No '.manifest' file found in archive")
            except Exception as err:
                error.append("Exception while opening zip file: "
                             "{err}".format(err=err))

        if not error:
            # Unzip file
            try:
                zip_ref = zipfile.ZipFile(full_path, 'r')
                zip_ref.extractall(tmp_folder)
                zip_ref.close()
            except Exception as err:
                error.append("Exception while extracting zip file: "
                             "{err}".format(err=err))

        if not error:
            try:
                import_settings_db = threading.Thread(
                    target=thread_import_influxdb, args=(tmp_folder, ))
                import_settings_db.start()
                return "success"
            except Exception as err:
                error.append("Exception while importing database: "
                             "{err}".format(err=err))
                return 'error'

    except Exception as err:
        error.append("Exception: {}".format(err))

    flash_success_errors(error, action, url_for('routes_page.page_export'))
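
thread_import_influxdb() is not included in these examples. The pattern above simply hands the restore off to a background thread so the request returns immediately; a loudly hypothetical outline of that handoff (the worker body below is only a placeholder):

import threading

def thread_import_influxdb(tmp_folder):
    """Hypothetical outline: restore the extracted InfluxDB backup in the background."""
    # Expected steps (not shown in these examples): stop the daemon and influxdb,
    # restore the metastore and database from tmp_folder, restart both, clean up.
    pass

tmp_folder = '/opt/Mycodo/upload/mycodo_influx_tmp'  # placeholder path for illustration
worker = threading.Thread(target=thread_import_influxdb, args=(tmp_folder,))
worker.start()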
Example #39
def import_settings(form):
    """
    Receive a zip file containing a Mycodo settings database that was
    exported with export_settings(), then back up the current Mycodo settings
    database and install the one from the zip in its place.
    """
    action = '{action} {controller}'.format(
        action=TRANSLATIONS['import']['title'],
        controller=TRANSLATIONS['settings']['title'])
    error = []

    try:
        correct_format = 'Mycodo_MYCODOVERSION_Settings_DBVERSION_HOST_DATETIME.zip'
        upload_folder = os.path.join(INSTALL_DIRECTORY, 'upload')
        tmp_folder = os.path.join(upload_folder, 'mycodo_db_tmp')
        mycodo_database_name = 'mycodo.db'
        full_path = None

        if not form.settings_import_file.data:
            error.append('No file present')
        elif form.settings_import_file.data.filename == '':
            error.append('No file name')
        else:
            # Split the uploaded file into parts
            file_name = form.settings_import_file.data.filename
            name = file_name.rsplit('.', 1)[0]
            extension = file_name.rsplit('.', 1)[1].lower()
            name_split = name.split('_')

            # Split the correctly-formatted filename into parts
            correct_name = correct_format.rsplit('.', 1)[0]
            correct_name_1 = correct_name.split('_')[0]
            correct_name_2 = correct_name.split('_')[2]
            correct_extension = correct_format.rsplit('.', 1)[1].lower()

            # Compare the uploaded filename parts to the correct parts
            try:
                if name_split[0] != correct_name_1:
                    error.append(
                        "Invalid file name: {n}: {fn} != {cn}.".format(
                            n=file_name, fn=name_split[0], cn=correct_name_1))
                    error.append(
                        "Correct format is: {fmt}".format(fmt=correct_format))
                elif name_split[2] != correct_name_2:
                    error.append("Invalid file name: {n}: {fn} != {cn}".format(
                        n=file_name, fn=name_split[2], cn=correct_name_2))
                    error.append(
                        "Correct format is: {fmt}".format(fmt=correct_format))
                elif extension != correct_extension:
                    error.append("Extension not 'zip'")
                elif name_split[1] > MYCODO_VERSION:
                    error.append(
                        "Invalid Mycodo version: {fv} > {mv}. Only databases "
                        "from Mycodo version {mv} or earlier can be "
                        "imported".format(
                            fv=name_split[1],
                            mv=MYCODO_VERSION))
            except Exception as err:
                error.append(
                    "Exception while verifying file name: {err}".format(
                        err=err))

        if not error:
            # Save file to upload directory
            filename = secure_filename(form.settings_import_file.data.filename)
            full_path = os.path.join(tmp_folder, filename)
            assure_path_exists(upload_folder)
            assure_path_exists(tmp_folder)
            form.settings_import_file.data.save(
                os.path.join(tmp_folder, filename))

            # Check if contents of zip file are correct
            try:
                file_list = zipfile.ZipFile(full_path, 'r').namelist()
                if len(file_list) > 1:
                    error.append("Incorrect number of files in zip: "
                                 "{an} != 1".format(an=len(file_list)))
                elif file_list[0] != mycodo_database_name:
                    error.append("Incorrect file in zip: {af} != {cf}".format(
                        af=file_list[0], cf=mycodo_database_name))
            except Exception as err:
                error.append("Exception while opening zip file: "
                             "{err}".format(err=err))

        if not error:
            # Unzip file
            try:
                zip_ref = zipfile.ZipFile(full_path, 'r')
                zip_ref.extractall(tmp_folder)
                zip_ref.close()
            except Exception as err:
                error.append("Exception while extracting zip file: "
                             "{err}".format(err=err))

        if not error:
            try:
                # Stop Mycodo daemon (backend)
                cmd = "{pth}/mycodo/scripts/mycodo_wrapper " \
                      "daemon_stop".format(
                    pth=INSTALL_DIRECTORY)
                _, _, _ = cmd_output(cmd)

                # Backup current database and replace with extracted mycodo.db
                imported_database = os.path.join(tmp_folder,
                                                 mycodo_database_name)
                backup_name = (
                    SQL_DATABASE_MYCODO + '.backup_' +
                    datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
                os.rename(SQL_DATABASE_MYCODO, backup_name)
                os.rename(imported_database, SQL_DATABASE_MYCODO)

                import_settings_db = threading.Thread(
                    target=thread_import_settings, args=(tmp_folder, ))
                import_settings_db.start()

                return backup_name
            except Exception as err:
                error.append("Exception while replacing database: "
                             "{err}".format(err=err))
                return None

    except Exception as err:
        error.append("Exception: {}".format(err))

    flash_success_errors(error, action, url_for('routes_page.page_export'))
Example #40
def import_settings(form):
    """
    Receive a zip file containing a Mycodo settings database that was
    exported with export_settings(), then back up the current Mycodo settings
    database and install the one from the zip in its place.
    """
    action = '{action} {controller}'.format(
        action=TRANSLATIONS['import']['title'],
        controller=TRANSLATIONS['settings']['title'])
    error = []

    try:
        correct_format = 'Mycodo_MYCODOVERSION_Settings_DBVERSION_HOST_DATETIME.zip'
        upload_folder = os.path.join(INSTALL_DIRECTORY, 'upload')
        tmp_folder = os.path.join(upload_folder, 'mycodo_db_tmp')
        full_path = None

        if not form.settings_import_file.data:
            error.append('No file present')
        elif form.settings_import_file.data.filename == '':
            error.append('No file name')
        else:
            # Split the uploaded file into parts
            file_name = form.settings_import_file.data.filename
            name = file_name.rsplit('.', 1)[0]
            extension = file_name.rsplit('.', 1)[1].lower()
            name_split = name.split('_')

            # Split the correctly-formatted filename into parts
            correct_name = correct_format.rsplit('.', 1)[0]
            correct_name_1 = correct_name.split('_')[0]
            correct_name_2 = correct_name.split('_')[2]
            correct_extension = correct_format.rsplit('.', 1)[1].lower()

            # Compare the uploaded filename parts to the correct parts
            try:
                if name_split[0] != correct_name_1:
                    error.append(
                        "Invalid file name: {n}: {fn} != {cn}.".format(
                            n=file_name, fn=name_split[0], cn=correct_name_1))
                    error.append(
                        "Correct format is: {fmt}".format(fmt=correct_format))
                elif name_split[2] != correct_name_2:
                    error.append("Invalid file name: {n}: {fn} != {cn}".format(
                        n=file_name, fn=name_split[2], cn=correct_name_2))
                    error.append(
                        "Correct format is: {fmt}".format(fmt=correct_format))
                elif extension != correct_extension:
                    error.append("Extension not 'zip'")
                elif name_split[1] > MYCODO_VERSION:
                    error.append(
                        "Invalid Mycodo version: {fv} > {mv}. Only databases "
                        "from Mycodo version {mv} or earlier can be "
                        "imported".format(
                            fv=name_split[1],
                            mv=MYCODO_VERSION))
            except Exception as err:
                error.append(
                    "Exception while verifying file name: {err}".format(
                        err=err))

        if not error:
            # Save file to upload directory
            filename = secure_filename(form.settings_import_file.data.filename)
            full_path = os.path.join(tmp_folder, filename)
            assure_path_exists(upload_folder)
            assure_path_exists(tmp_folder)
            form.settings_import_file.data.save(
                os.path.join(tmp_folder, filename))

            # Check if contents of zip file are correct
            try:
                file_list = zipfile.ZipFile(full_path, 'r').namelist()
                if DATABASE_NAME not in file_list:
                    error.append("{} not found in zip: {}".format(
                        DATABASE_NAME, ", ".join(file_list)))
            except Exception as err:
                error.append("Exception checking files in zip: "
                             "{err}".format(err=err))

        if not error:
            # Unzip file
            try:
                zip_ref = zipfile.ZipFile(full_path, 'r')
                zip_ref.extractall(tmp_folder)
                zip_ref.close()
            except Exception as err:
                error.append("Exception while extracting zip file: "
                             "{err}".format(err=err))

        if not error:
            try:
                # Stop Mycodo daemon (backend)
                cmd = "{pth}/mycodo/scripts/mycodo_wrapper " \
                      "daemon_stop".format(
                    pth=INSTALL_DIRECTORY)
                _, _, _ = cmd_output(cmd)

                # Backup current database and replace with extracted mycodo.db
                imported_database = os.path.join(tmp_folder, DATABASE_NAME)
                backup_name = (
                    SQL_DATABASE_MYCODO + '.backup_' +
                    datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
                os.rename(
                    SQL_DATABASE_MYCODO,
                    backup_name)  # rename current database to backup name
                os.rename(
                    imported_database,
                    SQL_DATABASE_MYCODO)  # move zipped database to Mycodo

                delete_directories = [
                    PATH_HTML_USER, PATH_PYTHON_CODE_USER,
                    PATH_FUNCTIONS_CUSTOM, PATH_INPUTS_CUSTOM,
                    PATH_OUTPUTS_CUSTOM, PATH_WIDGETS_CUSTOM, PATH_USER_SCRIPTS
                ]

                # Delete custom functions/inputs/outputs/widgets and generated HTML/Python code
                for each_dir in delete_directories:
                    if not os.path.exists(each_dir):
                        continue
                    for folder_name, sub_folders, filenames in os.walk(
                            each_dir):
                        for filename in filenames:
                            if filename == "__init__.py":
                                continue
                            file_path = os.path.join(folder_name, filename)
                            try:
                                os.remove(file_path)
                            except:
                                pass

                restore_directories = [(PATH_FUNCTIONS_CUSTOM,
                                        "custom_functions"),
                                       (PATH_INPUTS_CUSTOM, "custom_inputs"),
                                       (PATH_OUTPUTS_CUSTOM, "custom_outputs"),
                                       (PATH_WIDGETS_CUSTOM, "custom_widgets"),
                                       (PATH_USER_SCRIPTS, "user_scripts")]

                # Restore zipped custom functions/inputs/outputs/widgets
                for each_dir in restore_directories:
                    extract_dir = os.path.join(tmp_folder, each_dir[1])
                    if not os.path.exists(extract_dir):
                        continue
                    for folder_name, sub_folders, filenames in os.walk(
                            extract_dir):
                        for filename in filenames:
                            file_path = os.path.join(folder_name, filename)
                            new_path = os.path.join(each_dir[0], filename)
                            try:
                                os.rename(file_path, new_path)
                            except:
                                pass

                import_settings_db = threading.Thread(
                    target=thread_import_settings, args=(tmp_folder, ))
                import_settings_db.start()

                return backup_name
            except Exception as err:
                error.append("Exception while replacing database: "
                             "{err}".format(err=err))
                return None

    except Exception as err:
        error.append("Exception: {}".format(err))

    flash_success_errors(error, action, url_for('routes_page.page_export'))
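
Beyond mycodo.db, this variant also expects the settings archive to carry the user's custom modules in fixed subdirectories, matching the restore_directories mapping above. The implied archive layout, inferred from that mapping:

# Implied settings archive layout for this variant (inferred from restore_directories above)
ARCHIVE_LAYOUT = {
    'mycodo.db': 'replaces SQL_DATABASE_MYCODO',
    'custom_functions/': 'restored into PATH_FUNCTIONS_CUSTOM',
    'custom_inputs/': 'restored into PATH_INPUTS_CUSTOM',
    'custom_outputs/': 'restored into PATH_OUTPUTS_CUSTOM',
    'custom_widgets/': 'restored into PATH_WIDGETS_CUSTOM',
    'user_scripts/': 'restored into PATH_USER_SCRIPTS',
}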
Example #41
def save_conditional_code(error,
                          cond_statement,
                          cond_status,
                          unique_id,
                          table_conditions_all,
                          table_actions_all,
                          timeout=30,
                          test=False):
    lines_code = None
    cmd_status = None
    cmd_out = None

    try:
        pre_statement_run = """import os
import sys
sys.path.append(os.path.abspath('/var/mycodo-root'))
from mycodo.controllers.base_conditional import AbstractConditional
from mycodo.mycodo_client import DaemonControl
control = DaemonControl(pyro_timeout={timeout})

class ConditionalRun(AbstractConditional):
    def __init__(self, logger, function_id, message):
        super(ConditionalRun, self).__init__(logger, function_id, message, timeout={timeout})

        self.logger = logger
        self.function_id = function_id
        self.variables = {{}}
        self.message = message
        self.running = True

    def conditional_code_run(self):
""".format(timeout=timeout)

        if cond_statement:
            indented_code = textwrap.indent(cond_statement, ' ' * 8)
        else:
            indented_code = textwrap.indent("pass", ' ' * 8)

        cond_statement_run = pre_statement_run + indented_code
        cond_statement_run = cond_statement_replace(cond_statement_run,
                                                    table_conditions_all,
                                                    table_actions_all)

        cond_statement_run += """

    def function_status(self):
"""
        if cond_status:
            cond_statement_run += textwrap.indent(cond_status, ' ' * 8)
        else:
            cond_statement_run += textwrap.indent("pass", ' ' * 8)

        assure_path_exists(PATH_PYTHON_CODE_USER)
        file_run = '{}/conditional_{}.py'.format(PATH_PYTHON_CODE_USER,
                                                 unique_id)
        with open(file_run, 'w') as fw:
            fw.write('{}\n'.format(cond_statement_run))
            fw.close()
        set_user_grp(file_run, 'mycodo', 'mycodo')

        if len(cond_statement_run.splitlines()) > 999:
            error.append(
                "Too many lines in code. Reduce code to less than 1000 lines.")

        if test:
            lines_code = ''
            for line_num, each_line in enumerate(
                    cond_statement_run.splitlines(), 1):
                if len(str(line_num)) == 3:
                    line_spacing = ''
                elif len(str(line_num)) == 2:
                    line_spacing = ' '
                else:
                    line_spacing = '  '
                lines_code += '{sp}{ln}: {line}\n'.format(sp=line_spacing,
                                                          ln=line_num,
                                                          line=each_line)

            cmd_test = 'mkdir -p /var/mycodo-root/.pylint.d && ' \
                       'export PYTHONPATH=$PYTHONPATH:/var/mycodo-root && ' \
                       'export PYLINTHOME=/var/mycodo-root/.pylint.d && ' \
                       'pylint3 -d I,W0621,C0103,C0111,C0301,C0327,C0410,C0413,R0912,R0914,R0915 {path}'.format(
                           path=file_run)
            cmd_out, _, cmd_status = cmd_output(cmd_test)
    except Exception as err:
        error.append("Error saving/testing conditional code: {}".format(err))

    return error, lines_code, cmd_status, cmd_out
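
The file written above defines a ConditionalRun class that the conditional controller later imports and runs. A minimal sketch of loading such a generated conditional_<id>.py with importlib and calling it (this is not the controller's actual loading code; the helper name and logger are placeholders):

import importlib.util
import logging

def load_conditional_run(file_run, function_id):
    """Sketch: import a generated conditional file and instantiate its ConditionalRun class."""
    spec = importlib.util.spec_from_file_location('conditional_run', file_run)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    logger = logging.getLogger('mycodo.conditional_{}'.format(function_id))
    return module.ConditionalRun(logger, function_id, message='')

# cond = load_conditional_run('/path/to/conditional_<unique_id>.py', '<unique_id>')
# cond.conditional_code_run()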
Example #42
def camera_record(record_type,
                  unique_id,
                  duration_sec=None,
                  tmp_filename=None):
    """
    Record a still image, time-lapse image, or video from a camera
    :param record_type: 'photo', 'timelapse', or 'video'
    :param unique_id: unique ID of the camera to use
    :param duration_sec: video duration, in seconds
    :param tmp_filename: optional filename to use in place of the generated one
    :return: (save_path, filename) on success
    """
    daemon_control = None
    settings = db_retrieve_table_daemon(Camera, unique_id=unique_id)
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    assure_path_exists(PATH_CAMERAS)
    camera_path = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=settings.unique_id)))
    if record_type == 'photo':
        if settings.path_still != '':
            save_path = settings.path_still
        else:
            save_path = assure_path_exists(os.path.join(camera_path, 'still'))
        filename = 'Still-{cam_id}-{cam}-{ts}.jpg'.format(
            cam_id=settings.id, cam=settings.name,
            ts=timestamp).replace(" ", "_")
    elif record_type == 'timelapse':
        if settings.path_timelapse != '':
            save_path = settings.path_timelapse
        else:
            save_path = assure_path_exists(
                os.path.join(camera_path, 'timelapse'))
        start = datetime.datetime.fromtimestamp(
            settings.timelapse_start_time).strftime("%Y-%m-%d_%H-%M-%S")
        filename = 'Timelapse-{cam_id}-{cam}-{st}-img-{cn:05d}.jpg'.format(
            cam_id=settings.id,
            cam=settings.name,
            st=start,
            cn=settings.timelapse_capture_number).replace(" ", "_")
    elif record_type == 'video':
        if settings.path_video != '':
            save_path = settings.path_video
        else:
            save_path = assure_path_exists(os.path.join(camera_path, 'video'))
        filename = 'Video-{cam}-{ts}.h264'.format(cam=settings.name,
                                                  ts=timestamp).replace(
                                                      " ", "_")
    else:
        return

    assure_path_exists(save_path)

    if tmp_filename:
        filename = tmp_filename

    path_file = os.path.join(save_path, filename)

    # Turn on output, if configured
    if settings.output_id:
        daemon_control = DaemonControl()
        daemon_control.output_on(settings.output_id)

    # Pause while the output remains on for the specified duration.
    # Used for instance to allow fluorescent lights to fully turn on before
    # capturing an image.
    if settings.output_duration:
        time.sleep(settings.output_duration)

    if settings.library == 'picamera':
        # Try 5 times to access the pi camera (in case another process is accessing it)
        for _ in range(5):
            try:
                with picamera.PiCamera() as camera:
                    camera.resolution = (settings.width, settings.height)
                    camera.hflip = settings.hflip
                    camera.vflip = settings.vflip
                    camera.rotation = settings.rotation
                    camera.brightness = int(settings.brightness)
                    camera.contrast = int(settings.contrast)
                    camera.exposure_compensation = int(settings.exposure)
                    camera.saturation = int(settings.saturation)
                    camera.shutter_speed = settings.picamera_shutter_speed
                    camera.sharpness = settings.picamera_sharpness
                    camera.iso = settings.picamera_iso
                    camera.awb_mode = settings.picamera_awb
                    if settings.picamera_awb == 'off':
                        camera.awb_gains = (settings.picamera_awb_gain_red,
                                            settings.picamera_awb_gain_blue)
                    camera.exposure_mode = settings.picamera_exposure_mode
                    camera.meter_mode = settings.picamera_meter_mode
                    camera.image_effect = settings.picamera_image_effect

                    camera.start_preview()
                    time.sleep(2)  # Camera warm-up time

                    if record_type in ['photo', 'timelapse']:
                        camera.capture(path_file, use_video_port=False)
                    elif record_type == 'video':
                        camera.start_recording(path_file,
                                               format='h264',
                                               quality=20)
                        camera.wait_recording(duration_sec)
                        camera.stop_recording()
                    else:
                        return
                    break
            except picamera.exc.PiCameraMMALError:
                logger.error(
                    "The camera is already in use by another picamera "
                    "instance. Retrying.")
            time.sleep(1)

    elif settings.library == 'fswebcam':
        cmd = "/usr/bin/fswebcam --device {dev} --resolution {w}x{h} --set brightness={bt}% " \
              "--no-banner --save {file}".format(dev=settings.device,
                                                 w=settings.width,
                                                 h=settings.height,
                                                 bt=settings.brightness,
                                                 file=path_file)
        if settings.hflip:
            cmd += " --flip h"
        if settings.vflip:
            cmd += " --flip h"
        if settings.rotation:
            cmd += " --rotate {angle}".format(angle=settings.rotation)
        if settings.custom_options:
            cmd += " {}".format(settings.custom_options)

        out, err, status = cmd_output(cmd, stdout_pipe=False, user='******')
        logger.debug("Camera debug message: "
                     "cmd: {}; out: {}; error: {}; status: {}".format(
                         cmd, out, err, status))

    # Turn off output, if configured
    if settings.output_id and daemon_control:
        daemon_control.output_off(settings.output_id)

    try:
        set_user_grp(path_file, 'mycodo', 'mycodo')
        return save_path, filename
    except Exception as e:
        logger.exception(
            "Exception raised in 'camera_record' when setting user grp: "
            "{err}".format(err=e))