def test_containers_create(self):
    """Create a custom 96-well container, verify its geometry after
    loading it onto the deck, and clean up the persisted containers
    file if this test was the only thing that created it.

    Fix: ``assertEquals`` is a deprecated alias (removed in Python
    3.12); use ``assertEqual``.
    """
    container_name = 'plate_for_testing_containers_create'
    containers_create(
        name=container_name,
        grid=(8, 12),
        spacing=(9, 9),
        diameter=4,
        depth=8,
        volume=1000)

    p = containers_load(self.robot, container_name, 'A1')

    # Geometry checks: an 8x12 grid yields 96 wells.
    self.assertEqual(len(p), 96)
    self.assertEqual(len(p.rows), 12)
    self.assertEqual(len(p.cols), 8)
    self.assertEqual(
        p.get_parent(), self.robot.deck['A1'])
    self.assertEqual(p['C3'], p[18])
    self.assertEqual(p['C3'].max_volume(), 1000)
    for i, w in enumerate(p):
        self.assertEqual(w, p[i])

    # remove the file if we only created it for this test
    should_delete = False
    with open(environment.get_path('CONTAINERS_FILE')) as f:
        created_containers = json.load(f)
        del created_containers['containers'][p.get_name()]
        if not len(created_containers['containers'].keys()):
            should_delete = True
    if should_delete:
        os.remove(environment.get_path('CONTAINERS_FILE'))
def check_version_and_perform_necessary_migrations():
    """Bring the on-disk sqlite database up to the current schema version.

    Version 0 -> 1: create the ContainerWells and Containers tables and
    migrate legacy container data into them.

    Fix: the sqlite3 connection was previously leaked; it is now always
    closed via try/finally.
    """
    db_path = environment.get_path('DATABASE_FILE')
    conn = sqlite3.connect(db_path)
    try:
        db_version = database.get_version()
        if db_version == 0:
            execute_schema_change(conn, create_table_ContainerWells)
            execute_schema_change(conn, create_table_Containers)
            migrate_containers_and_wells()
            database.set_version(1)
    finally:
        conn.close()
def test_file(file_name):
    """Copy a fixture calibration file into the calibrations directory,
    force the instrument to re-read its calibrations, and assert that
    the persisted file was migrated to version 1.
    """
    # NOTE(review): `self` is used below but is not a parameter -- this
    # helper is presumably defined inside a test method (closure) or
    # bound to a test case elsewhere; confirm against the enclosing scope.
    calib_dir = environment.get_path('CALIBRATIONS_DIR')
    shutil.copyfile(
        os.path.join(os.path.dirname(__file__), file_name),
        os.path.join(calib_dir, 'calibrations.json'))
    # Triggers the read (and any migration) of the copied calibrations.
    instrument.Instrument()._read_calibrations()
    file = os.path.join(calib_dir, 'calibrations.json')
    with open(file) as f:
        calib_object = json.load(f)
        self.assertEquals(calib_object['version'], 1)
def save_custom_container(data):
    """Merge ``data`` into the persisted custom-containers JSON file.

    Creates the file with an empty ``containers`` map on first use,
    then rewrites it in place with the merged contents.
    """
    path = environment.get_path('CONTAINERS_FILE')

    # First run: seed the file with an empty container map.
    if not os.path.isfile(path):
        with open(path, 'w') as f:
            f.write(json.dumps({'containers': {}}))

    with open(path, 'r+') as f:
        existing = json.load(f)
        existing['containers'].update(data)
        # Rewrite from the start and drop any trailing leftovers.
        f.seek(0)
        f.write(json.dumps(existing, indent=4))
        f.truncate()
def setUpClass(cls):
    """Point APP_DATA_DIR at a test-local directory and replace the
    persisted containers with the fixtures shipped next to this test.
    """
    # Drop anything cached from previously loaded container files.
    old_container_loading.persisted_containers_dict.clear()

    here = os.path.dirname(__file__)
    os.environ['APP_DATA_DIR'] = os.path.join(here, 'opentrons-data')
    environment.refresh()

    # Replace the containers dir wholesale with the test fixtures.
    fixtures = os.path.join(here, 'data')
    target = environment.get_path('CONTAINERS_DIR')
    shutil.rmtree(target)
    shutil.copytree(fixtures, target)
def setUpClass(cls):
    """Redirect APP_DATA_DIR to a test-local folder and install the
    fixture container definitions from the adjacent ``data`` directory.
    """
    # Clear the module-level cache of previously loaded containers.
    persisted_containers.persisted_containers_dict.clear()

    base_dir = os.path.dirname(__file__)
    os.environ['APP_DATA_DIR'] = os.path.join(base_dir, 'opentrons-data')
    environment.refresh()

    src = os.path.join(base_dir, 'data')
    dest = environment.get_path('CONTAINERS_DIR')
    # Swap in the fixture containers directory.
    shutil.rmtree(dest)
    shutil.copytree(src, dest)
def start():
    """Entry point for the Flask/SocketIO server.

    Runs one-time startup work (skipped when the DEBUG env var is
    "true"), starts the connection watcher, wires the app logger, and
    then blocks in ``socketio.run`` on port 31950.

    Fix: the handler wiring used a list comprehension purely for its
    side effects; replaced with a plain loop.
    """
    data_dir = environment.get_path('APP_DATA_DIR')
    IS_DEBUG = os.environ.get('DEBUG', '').lower() == 'true'
    if not IS_DEBUG:
        run_once(data_dir)

    _start_connection_watcher()

    from opentrons.server import log  # NOQA
    lg = logging.getLogger('opentrons-app')
    lg.info('Starting Flask Server')
    for handler in lg.handlers:
        app.logger.addHandler(handler)

    socketio.run(
        app,
        debug=False,
        logger=False,
        use_reloader=False,
        log_output=False,
        engineio_logger=False,
        port=31950)
def test_get_path(self):
    """get_path returns existing paths for valid keys and raises
    ValueError for unknown keys and keys lacking a _DIR/_FILE suffix.

    Fix: the injected "INVALID_KEY" entry is now removed afterwards so
    other tests do not see a polluted settings dict.
    """
    app_data = environment.get_path("APP_DATA_DIR")
    self.assertTrue(os.path.exists(app_data))

    log_file = environment.get_path("LOG_FILE")
    log_path, _ = os.path.split(log_file)
    self.assertTrue(os.path.exists(log_path))

    with self.assertRaisesRegex(
            ValueError,
            'Key "APP_DATA" not found in environment settings'):
        environment.get_path("APP_DATA")

    environment.settings["INVALID_KEY"] = "invalid path"
    try:
        with self.assertRaisesRegex(
                ValueError,
                'Expected key suffix as _DIR or _FILE. '
                '"INVALID_KEY" received'):
            environment.get_path("INVALID_KEY")
    finally:
        del environment.settings["INVALID_KEY"]
def get_custom_container_files():
    """
    Traverses environment.get_path('CONTAINERS_DIR') to retrieve
    all .json files, skipping hidden (dot-prefixed) files and dirs.
    """
    def is_special_file(name):
        # Hidden files/dirs are dot-prefixed.
        return name.startswith('.')

    res = []
    top = environment.get_path('CONTAINERS_DIR')
    for root, dirnames, files in os.walk(top):
        # Prune hidden directories *in place* so os.walk skips them.
        # Fix: the original removed entries while iterating a lazy
        # filter over the same list, which can skip adjacent hidden
        # directories; rebuilding via slice assignment is safe and
        # preserves the in-place mutation os.walk requires.
        dirnames[:] = [d for d in dirnames if not is_special_file(d)]
        res.extend([
            os.path.join(root, name)
            for name in files
            if not is_special_file(name) and name.endswith('.json')
        ])
    return res
def test_get_path(self):
    """get_path returns real paths for valid keys and raises ValueError
    for unknown keys and keys without a _DIR/_FILE suffix.

    Fix: clean up the injected 'INVALID_KEY' so later tests see
    pristine settings (the original leaked it).
    """
    app_data = environment.get_path('APP_DATA_DIR')
    self.assertTrue(os.path.exists(app_data))

    log_file = environment.get_path('LOG_FILE')
    log_path, _ = os.path.split(log_file)
    self.assertTrue(os.path.exists(log_path))

    with self.assertRaisesRegex(
            ValueError,
            'Key "APP_DATA" not found in environment settings'):
        environment.get_path('APP_DATA')

    environment.settings['INVALID_KEY'] = 'invalid path'
    try:
        with self.assertRaisesRegex(
                ValueError,
                'Expected key suffix as _DIR or _FILE. "INVALID_KEY" received'
        ):
            environment.get_path('INVALID_KEY')
    finally:
        del environment.settings['INVALID_KEY']
import logging
from logging.config import dictConfig
import os

from opentrons.util import environment

# App logs live under <APP_DATA_DIR>/logs.
data_dir = environment.get_path('APP_DATA_DIR')
LOG_FILE_DIR = os.path.join(data_dir, 'logs')

# dictConfig-style logging configuration: a stream handler for debug
# output, a null handler, and a rotating file handler for the app log.
# NOTE(review): this chunk is truncated -- the handlers dict (and the
# rest of the configuration) continues beyond the visible source.
logging_config = dict(
    version=1,
    formatters={
        'basic': {
            'format':
            '%(asctime)s %(name)s %(levelname)s [Line %(lineno)s] %(message)s' #NOQA
        }
    },
    handlers={
        'debug': {
            'class': 'logging.StreamHandler',
            'formatter': 'basic',
        },
        'null': {
            'class': 'logging.NullHandler',
        },
        'opentrons-app': {
            'class': 'logging.handlers.RotatingFileHandler',
            'formatter': 'basic',
            'filename': os.path.join(LOG_FILE_DIR, 'opentrons-app.log'),
            # Rotate at ~5 MB, keep 3 backups.
            'maxBytes': 5000000,
            'backupCount': 3
import logging
from logging.config import dictConfig

from opentrons.util import environment

# Single rotating log file resolved through the environment settings.
LOG_FILENAME = environment.get_path('LOG_FILE')

# dictConfig-style logging configuration: stream handlers for debug and
# development use plus a rotating file handler.
# NOTE(review): this chunk is truncated -- the configuration dict
# continues beyond the visible source.
logging_config = dict(
    version=1,
    formatters={
        'basic': {
            'format':
            '%(asctime)s %(name)s %(levelname)s [Line %(lineno)s] %(message)s' #NOQA
        }
    },
    handlers={
        'debug': {
            'class': 'logging.StreamHandler',
            'formatter': 'basic',
            'level': logging.DEBUG},
        'development': {
            'class': 'logging.StreamHandler',
            'formatter': 'basic',
            'level': logging.WARNING},
        'file': {
            'class': 'logging.handlers.RotatingFileHandler',
            'formatter': 'basic',
            'filename': LOG_FILENAME,
            # Rotate at ~5 MB, keep 3 backups.
            'maxBytes': 5000000,
            'level': logging.INFO,
            'backupCount': 3
def _get_calibration_file_path(self):
    """
    :return: the absolute file path of the calibration file,
        as resolved by the environment settings
    """
    calibrations_file = environment.get_path('CALIBRATIONS_FILE')
    return calibrations_file
import math
import os
import pkg_resources
import time
from threading import Event

from opentrons.util.log import get_logger
from opentrons.util.vector import Vector
from opentrons.drivers.smoothie_drivers import VirtualSmoothie, SmoothieDriver
from opentrons.util import trace, environment

# Smoothie default settings ship inside the package; the editable
# runtime config lives under <APP_DATA_DIR>/smoothie.
DEFAULTS_DIR_PATH = pkg_resources.resource_filename('opentrons.config', 'smoothie')
DEFAULTS_FILE_PATH = os.path.join(DEFAULTS_DIR_PATH, 'smoothie-defaults.ini')
CONFIG_DIR_PATH = environment.get_path('APP_DATA_DIR')
CONFIG_DIR_PATH = os.path.join(CONFIG_DIR_PATH, 'smoothie')
CONFIG_FILE_PATH = os.path.join(CONFIG_DIR_PATH, 'smoothie-config.ini')

log = get_logger(__name__)


class SmoothieDriver_1_2_0(SmoothieDriver):
    """
    This object outputs raw GCode commands to perform high-level tasks.
    """
    # GCode command mnemonics used by this driver.
    # NOTE(review): truncated chunk -- the class body continues past the
    # visible source.
    MOVE = 'G0'
    DWELL = 'G4'
    HOME = 'G28'
    SET_POSITION = 'G92'
def log_init():
    """
    Set log levels and format strings for the API server.

    The desired level name is read from ``robot.config.log_level``;
    unrecognized names fall back to INFO. API messages and raw serial
    traffic are written to separate rotating log files.

    Fix: the previous docstring claimed an OT_LOG_LEVEL environment
    variable and a DEBUG default, neither of which matched the code.
    """
    fallback_log_level = 'INFO'
    ot_log_level = robot.config.log_level
    # logging._nameToLevel is a private mapping, but it is the simplest
    # way to both validate and convert the configured level name.
    if ot_log_level not in logging._nameToLevel:
        log.info("OT Log Level {} not found. Defaulting to {}".format(
            ot_log_level, fallback_log_level))
        ot_log_level = fallback_log_level

    level_value = logging._nameToLevel[ot_log_level]

    serial_log_filename = environment.get_path('SERIAL_LOG_FILE')
    api_log_filename = environment.get_path('LOG_FILE')

    logging_config = dict(
        version=1,
        formatters={
            'basic': {
                'format':
                '%(asctime)s %(name)s %(levelname)s [Line %(lineno)s] %(message)s'  # noqa: E501
            }
        },
        handlers={
            'debug': {
                'class': 'logging.StreamHandler',
                'formatter': 'basic',
                'level': level_value
            },
            'serial': {
                'class': 'logging.handlers.RotatingFileHandler',
                'formatter': 'basic',
                'filename': serial_log_filename,
                # Serial traffic rotates at ~5 MB, keeping 3 backups.
                'maxBytes': 5000000,
                'level': logging.DEBUG,
                'backupCount': 3
            },
            'api': {
                'class': 'logging.handlers.RotatingFileHandler',
                'formatter': 'basic',
                'filename': api_log_filename,
                # API log rotates at ~1 MB, keeping 5 backups.
                'maxBytes': 1000000,
                'level': logging.DEBUG,
                'backupCount': 5
            }
        },
        loggers={
            '__main__': {
                'handlers': ['debug', 'api'],
                'level': logging.INFO
            },
            'opentrons.server': {
                'handlers': ['debug', 'api'],
                'level': level_value
            },
            'opentrons.api': {
                'handlers': ['debug', 'api'],
                'level': level_value
            },
            'opentrons.instruments': {
                'handlers': ['debug', 'api'],
                'level': level_value
            },
            'opentrons.robot.robot_configs': {
                'handlers': ['debug', 'api'],
                'level': level_value
            },
            'opentrons.drivers.smoothie_drivers.driver_3_0': {
                'handlers': ['debug', 'api'],
                'level': level_value
            },
            'opentrons.drivers.serial_communication': {
                'handlers': ['serial'],
                'level': logging.DEBUG
            }
        })
    dictConfig(logging_config)
# this is a sample program to run Bradford Assay protocol # without native app from opentrons from opentrons import robot, containers, instruments from opentrons.util import environment environment.refresh() print(environment.get_path('CALIBRATIONS_FILE')) robot.commands() robot.connect(robot.get_serial_ports_list()[0]) tiprack = containers.load('tiprack-200ul', 'B1') tiprack2 = containers.load('tiprack-200ul', 'B2') trash = containers.load('point', 'C2') trough = containers.load('trough-12row', 'C1') plate = containers.load('96-PCR-flat', 'D1') tuberack = containers.load('tube-rack-2ml', 'D2') m50 = instruments.Pipette(name="p200", trash_container=trash, tip_racks=[tiprack, tiprack2], max_volume=50, axis="a", channels=8) p200 = instruments.Pipette(name="p200S", trash_container=trash,
def tearDownClass(cls):
    """Remove the test APP_DATA_DIR and restore a clean environment.

    Fix: also call ``environment.refresh()`` after unsetting the env
    var, matching the sibling tearDownClass in this codebase; without
    it, cached settings keep pointing at the deleted directory.
    """
    shutil.rmtree(environment.get_path('APP_DATA_DIR'))
    del os.environ['APP_DATA_DIR']
    environment.refresh()
import atexit
import os
import socket
import subprocess

from opentrons import robot
from opentrons.util import environment

# Result formatting for the QC output.
RESULT_SPACE = '\t- {}'
FAIL = 'FAIL\t*** !!! ***'
PASS = '******'

USB_MOUNT_FILEPATH = '/mnt/usbdrive'
DATA_FOLDER = environment.get_path('APP_DATA_DIR')
VIDEO_FILEPATH = os.path.join(DATA_FOLDER, './cam_test.mp4')
AUDIO_FILE_PATH = '/etc/audio/speaker-test.mp3'


def _find_storage_device():
    # Ensure the USB mount point exists, then look for a /dev/sdX1
    # partition to mount; prints a FAIL line when none is present.
    # NOTE(review): chunk appears truncated -- the function body likely
    # continues past the visible source.
    if os.path.exists(USB_MOUNT_FILEPATH) is False:
        run_quiet_process('mkdir {}'.format(USB_MOUNT_FILEPATH))
    if os.path.ismount(USB_MOUNT_FILEPATH) is False:
        sdn1_devices = [
            '/dev/sd{}1'.format(l)
            for l in 'abcdefgh'
            if os.path.exists('/dev/sd{}1'.format(l))
        ]
        if len(sdn1_devices) == 0:
            print(RESULT_SPACE.format(FAIL))
            return
import sqlite3 # import warnings from typing import List from opentrons.containers.placeable import Container, Well, Module from opentrons.data_storage import database_queries as db_queries from opentrons.util import environment from opentrons.util.vector import Vector from opentrons.data_storage import labware_definitions as ldef from opentrons.data_storage import serializers from opentrons.config import feature_flags as fflags import logging SUPPORTED_MODULES = ['magdeck', 'tempdeck'] log = logging.getLogger(__file__) database_path = environment.get_path('DATABASE_FILE') if not fflags.split_labware_definitions(): log.debug("Database path: {}".format(database_path)) # ======================== Private Functions ======================== # def _parse_container_obj(container: Container): # Note: in the new labware system, container coordinates are always (0,0,0) return dict(zip('xyz', container._coordinates)) def _parse_well_obj(well: Well): r_x, r_y, r_z = well._coordinates + well.bottom()[1] location, depth = well.get_name(), well.z_size() diameter = well.properties.get('diameter', None)
def tearDownClass(cls):
    """Delete the temporary app-data directory, unset APP_DATA_DIR,
    and reset the environment path settings to their defaults.
    """
    app_data = environment.get_path('APP_DATA_DIR')
    shutil.rmtree(app_data)
    # pop() raises KeyError when absent, exactly like `del`.
    os.environ.pop('APP_DATA_DIR')
    environment.refresh()
from opentrons.server.endpoints import (wifi, control)
from opentrons.config import feature_flags as ff
from opentrons.util import environment
from opentrons.deck_calibration import endpoints as dc_endp
from logging.config import dictConfig

# Prefer the real ot2serverlib endpoints; fall back to the bundled
# implementation when the module is not installed.
try:
    from ot2serverlib import endpoints
except ModuleNotFoundError:
    print("Module ot2serverlib not found--using fallback implementation")
    from opentrons.server.endpoints import serverlib_fallback as endpoints

from argparse import ArgumentParser

log = logging.getLogger(__name__)
lock_file_path = '/tmp/resin/resin-updates.lock'
log_file_path = environment.get_path('LOG_DIR')


def lock_resin_updates():
    # On the robot (RUNNING_ON_PI set), take an exclusive flock on the
    # resin-updates lock file to keep the supervisor from applying
    # updates while the server runs.
    # NOTE(review): the `with` block closes the handle on exit, which
    # releases the flock immediately -- confirm the intended lifetime.
    # NOTE(review): chunk may be truncated past the visible source.
    if os.environ.get('RUNNING_ON_PI'):
        import fcntl
        try:
            with open(lock_file_path, 'w') as fd:
                fd.write('a')
                fcntl.flock(fd, fcntl.LOCK_EX)
                fd.close()
        except OSError:
            log.warning('Unable to create resin-update lock file')