def env_handler(request, *callback_args, **callback_kwargs):
    template = loader.get_template('env_health_dashboard/env.html')
    env = callback_kwargs['env'].lower()
    brand = callback_kwargs['brand']
    if brand not in settings.ENV:
        brand = settings.DEFAULT_BRAND
    if env != "self_test" and env not in settings.ENV[brand.lower()]:
        env = settings.DEFAULT_ENV
    skinnyColumnPortletAssignments = \
        portlet_configuration_service.get_skinny_column_portlet_assignment_from_env(env)
    wideColumnPortletAssignments = \
        portlet_configuration_service.get_wide_column_portlet_assignment_from_env(env)
    context = RequestContext(
        request, {
            'skinny_portlet_list': initialize_services(skinnyColumnPortletAssignments),
            'wide_portlet_list': initialize_services(wideColumnPortletAssignments),
            'env': env,
            'version': version.getVersion(),
            'brand': brand,
            'timestamp': _format_time(datetime.now()),
            'env_list': settings.ENV[brand.lower()]
        })
    return HttpResponse(template.render(context))
def onAbout(self, event):
    info = wx.AboutDialogInfo()
    info.AddDeveloper('bogolt ([email protected])')
    info.SetName('dcLord')
    info.SetWebSite('https://github.com/bogolt/dclord')
    info.SetVersion(version.getVersion())
    info.SetDescription('Divide and Conquer\ngame client\nsee at: http://www.the-game.ru')
    wx.AboutBox(info)
def OnMenuAbout(self, event):
    info = wx.AboutDialogInfo()

    description = _("""\
ConeDevelopment is an interactive program to explore the development
of a truncated cone.

Truncating a cone by a plane results in second degree curves in the
truncation plane. But what about the curve when you develop the
surface of the truncated cone? Surprisingly these curves are not so
obvious or trivial. This program lets you explore the question by
computing the curve numerically. One can modify the cone shape and
plane angle as desired and see the resulting development curve.

This program was based on another more complex program,
https://github.com/pbauermeister/Anamorphy. That is why (1) in the
source code there may be remains of the original program, and (2) it
is still in Python 2.x.

I would be interested in any analytical solution (as opposed to the
very overkill numerical solution proposed here). You can kindly
contact me (or post a github ticket) for any hint.

This program was inspired by an interesting discussion with
Mr. Udo Bund, my wife's uncle. It was about the construction of a cap
to fit on the nozzle of a high-pressure Kaercher cleaner in order to
reduce the peripheral projections.
""").replace('\n\n', '@@@').replace('\n', ' ').replace('@@@', '\n\n')

    licence = _("""\
ConeDevelopment is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.

ConeDevelopment is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License along
with ConeDevelopment; if not, write to the Free Software Foundation, Inc.,
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA""")

    info.SetIcon(wx.Icon('logo64.png', wx.BITMAP_TYPE_PNG))
    info.SetName('ConeDevelopment')
    info.SetVersion(version.getVersion())
    info.SetDescription(description)
    info.SetCopyright(_('(C) Copyright 2018 by Pascal Bauermeister'))
    info.SetWebSite('https://github.com/pbauermeister/ConeDevelopment')
    info.SetLicence(licence)
    info.AddDeveloper('Pascal Bauermeister')
    info.AddDocWriter('Pascal Bauermeister')
    #info.AddArtist('Pascal Bauermeister')
    #info.AddTranslator('Pascal Bauermeister')

    wx.AboutBox(info)
def main():
    args = getarg()
    version = getVersion()
    if not version:
        sys.exit(-1)
    updateVersion(version)
    if args.git:
        commitAndTagging(version)
def version(self):
    """
    Returns version information about the server.

    Parameters:
        None
    Returns:
        * Dictionary
            -- "version" - string
            -- "name" - string
    """
    tmp = {}
    tmp['version'] = version.getVersion()
    tmp['name'] = version.getName()
    return tmp
def setup_version(env, Configure, args):
    if int(args.get('release', 0)):
        vtype = 0
    elif int(args.get('snapshot', 0)):
        vtype = 1
    else:
        vtype = 2
    env["ENV"]["VERSION"] = getVersion(vtype)
    print "Building version %s." % (env["ENV"]["VERSION"],)

    conf = Configure(env, checks)
    if not conf.CheckArchitecture(env, args):
        print "Error. Your architecture is unsupported. Please contact the Yhc developers."
        sys.exit(1)
    conf.CheckOS(env)
    conf.Finish()
def get_version():
    if request.method == 'GET':
        ver = version.getVersion()
        if version.updateCheck()[0]:
            updateready = True
            updateversion = version.updateCheck()[1]
            print('Update steht bereit (' + updateversion + ')! Weitere Informationen unter https://github.com/rix1337/RSScrawler/releases/latest')
        else:
            updateready = False
        return jsonify({
            "version": {
                "ver": ver,
                "update_ready": updateready,
                "docker": docker,
            }
        })
    else:
        return "Failed", 405
from distutils.core import setup
import os
import re
import sys

import version

release = version.getVersion()

if len(sys.argv) == 2 and sys.argv[1] == 'builddoc':
    os.execlp('sphinx-build',
              '-Drelease=' + release,
              '-Dversion=' + '.'.join(release.split('.', 2)[0:2]),
              '.', 'html')

with open('README.rst') as f:
    readme = f.read()
with open('version-history.rst') as f:
    readme += '\n' + f.read()

kwargs = {
    'name': 'pygtrie',
    'version': release,
    'description': 'Trie data structure implementation.',
    'long_description': readme,
    'author': 'Michal Nazarewicz',
    'author_email': '*****@*****.**',
    'url': 'https://github.com/google/pygtrie',
    'py_modules': ['trie'],
    'license': 'Apache-2.0',
    'platforms': 'Platform Independent',
    'keywords': ['trie', 'prefix tree', 'data structure'],
def get_all():
    if request.method == 'GET':
        general = RssConfig('RSScrawler')
        alerts = RssConfig('Notifications')
        crawljobs = RssConfig('Crawljobs')
        mb = RssConfig('MB')
        sj = RssConfig('SJ')
        yt = RssConfig('YT')
        ver = version.getVersion()
        if version.updateCheck()[0]:
            updateready = True
            updateversion = version.updateCheck()[1]
            print('Update steht bereit (' + updateversion + ')! Weitere Informationen unter https://github.com/rix1337/RSScrawler/releases/latest')
        else:
            updateready = False
        log = ''
        logfile = os.path.join(os.path.dirname(sys.argv[0]), 'RSScrawler.log')
        if os.path.isfile(logfile):
            logfile = open(os.path.join(logfile))
            output = StringIO.StringIO()
            for line in reversed(logfile.readlines()):
                output.write("<p>" + line.replace("\n", "</p>"))
            log = output.getvalue()
        return jsonify({
            "version": {
                "ver": ver,
                "update_ready": updateready,
                "docker": docker,
            },
            "log": log,
            "lists": {
                "mb": {
                    "filme": getListe('MB_Filme'),
                    "filme3d": getListe('MB_3D'),
                    "regex": getListe('MB_Regex'),
                },
                "sj": {
                    "serien": getListe('SJ_Serien'),
                    "regex": getListe('SJ_Serien_Regex'),
                    "staffeln_regex": getListe('SJ_Staffeln_Regex'),
                },
                "mbsj": {
                    "staffeln": getListe('MB_Staffeln'),
                },
                "yt": {
                    "kanaele_playlisten": getListe('YT_Channels'),
                },
            },
            "settings": {
                "general": {
                    "pfad": general.get("jdownloader"),
                    "port": to_int(general.get("port")),
                    "prefix": general.get("prefix"),
                    "interval": to_int(general.get("interval")),
                    "english": bool(general.get("english")),
                    "hoster": general.get("hoster"),
                },
                "alerts": {
                    "homeassistant": alerts.get("homeassistant"),
                    "pushbullet": alerts.get("pushbullet"),
                    "pushover": alerts.get("pushover"),
                },
                "crawljobs": {
                    "autostart": bool(crawljobs.get("autostart")),
                    "subdir": bool(crawljobs.get("subdir")),
                },
                "mb": {
                    "quality": mb.get("quality"),
                    "ignore": mb.get("ignore"),
                    "regex": bool(mb.get("regex")),
                    "imdb_score": to_float(mb.get("imdb")),
                    "imdb_year": to_int(mb.get("imdbyear")),
                    "historical": bool(mb.get("historical")),
                    "force_dl": bool(mb.get("enforcedl")),
                    "cutoff": bool(mb.get("cutoff")),
                    "crawl_3d": bool(mb.get("crawl3d")),
                },
                "sj": {
                    "quality": sj.get("quality"),
                    "ignore": sj.get("rejectlist"),
                    "regex": bool(sj.get("regex")),
                },
                "mbsj": {
                    "enabled": bool(mb.get("crawlseasons")),
                    "quality": mb.get("seasonsquality"),
                    "packs": bool(mb.get("seasonpacks")),
                    "source": mb.get("seasonssource"),
                },
                "yt": {
                    "enabled": bool(yt.get("youtube")),
                    "max": to_int(yt.get("maxvideos")),
                    "ignore": yt.get("ignore"),
                }
            }
        })
    else:
        return "Failed", 405
def get_post_settings():
    if request.method == 'GET':
        general = RssConfig('RSScrawler')
        alerts = RssConfig('Notifications')
        crawljobs = RssConfig('Crawljobs')
        mb = RssConfig('MB')
        sj = RssConfig('SJ')
        yt = RssConfig('YT')
        return jsonify({
            "settings": {
                "general": {
                    "pfad": general.get("jdownloader"),
                    "port": to_int(general.get("port")),
                    "prefix": general.get("prefix"),
                    "interval": to_int(general.get("interval")),
                    "english": bool(general.get("english")),
                    "hoster": general.get("hoster"),
                },
                "alerts": {
                    "homeassistant": alerts.get("homeassistant"),
                    "pushbullet": alerts.get("pushbullet"),
                    "pushover": alerts.get("pushover"),
                },
                "crawljobs": {
                    "autostart": bool(crawljobs.get("autostart")),
                    "subdir": bool(crawljobs.get("subdir")),
                },
                "mb": {
                    "quality": mb.get("quality"),
                    "ignore": mb.get("ignore"),
                    "regex": bool(mb.get("regex")),
                    "imdb_score": to_float(mb.get("imdb")),
                    "imdb_year": to_int(mb.get("imdbyear")),
                    "historical": bool(mb.get("historical")),
                    "force_dl": bool(mb.get("enforcedl")),
                    "cutoff": bool(mb.get("cutoff")),
                    "crawl_3d": bool(mb.get("crawl3d")),
                },
                "sj": {
                    "quality": sj.get("quality"),
                    "ignore": sj.get("rejectlist"),
                    "regex": bool(sj.get("regex")),
                },
                "mbsj": {
                    "enabled": bool(mb.get("crawlseasons")),
                    "quality": mb.get("seasonsquality"),
                    "packs": bool(mb.get("seasonpacks")),
                    "source": mb.get("seasonssource"),
                },
                "yt": {
                    "enabled": bool(yt.get("youtube")),
                    "max": to_int(yt.get("maxvideos")),
                    "ignore": yt.get("ignore"),
                }
            }
        })
    if request.method == 'POST':
        data = request.json
        with open(os.path.join(os.path.dirname(sys.argv[0]),
                               'Einstellungen/RSScrawler.ini'), 'wb') as f:
            f.write('# RSScrawler.ini (Stand: RSScrawler ' + version.getVersion() + ')\n')
            f.write("\n[RSScrawler]\n")
            f.write("jdownloader = " + to_str(data['general']['pfad']).encode('utf-8') + "\n")
            f.write("port = " + to_str(data['general']['port']).encode('utf-8') + "\n")
            f.write("prefix = " + to_str(data['general']['prefix']).encode('utf-8').lower() + "\n")
            interval = to_str(data['general']['interval']).encode('utf-8')
            if to_int(interval) < 3:
                interval = '3'
            f.write("interval = " + interval + "\n")
            f.write("english = " + to_str(data['general']['english']).encode('utf-8') + "\n")
            f.write("hoster = " + to_str(data['general']['hoster']).encode('utf-8') + "\n")
            f.write("\n[MB]\n")
            f.write("quality = " + to_str(data['mb']['quality']).encode('utf-8') + "\n")
            f.write("ignore = " + to_str(data['mb']['ignore']).encode('utf-8').lower() + "\n")
            f.write("historical = " + to_str(data['mb']['historical']).encode('utf-8') + "\n")
            f.write("regex = " + to_str(data['mb']['regex']).encode('utf-8') + "\n")
            f.write("cutoff = " + to_str(data['mb']['cutoff']).encode('utf-8') + "\n")
            f.write("crawl3d = " + to_str(data['mb']['crawl_3d']).encode('utf-8') + "\n")
            f.write("enforcedl = " + to_str(data['mb']['force_dl']).encode('utf-8') + "\n")
            f.write("crawlseasons = " + to_str(data['mbsj']['enabled']).encode('utf-8') + "\n")
            f.write("seasonsquality = " + to_str(data['mbsj']['quality']).encode('utf-8') + "\n")
            f.write("seasonpacks = " + to_str(data['mbsj']['packs']).encode('utf-8') + "\n")
            f.write("seasonssource = " + to_str(data['mbsj']['source']).encode('utf-8').lower() + "\n")
            f.write("imdbyear = " + to_str(data['mb']['imdb_year']).encode('utf-8') + "\n")
            imdb = to_str(data['mb']['imdb_score']).encode('utf-8')
            if re.match('[^0-9]', imdb):
                imdb = 0.0
            elif imdb == '':
                imdb = 0.0
            else:
                imdb = round(float(to_str(data['mb']['imdb_score']).encode('utf-8').replace(",", ".")), 1)
                if imdb > 10:
                    imdb = 10.0
            f.write("imdb = " + to_str(imdb) + "\n")
            f.write("\n[SJ]\n")
            f.write("quality = " + to_str(data['sj']['quality']).encode('utf-8') + "\n")
            f.write("rejectlist = " + to_str(data['sj']['ignore']).encode('utf-8').lower() + "\n")
            f.write("regex = " + to_str(data['sj']['regex']).encode('utf-8') + "\n")
            f.write("\n[YT]\n")
            f.write("youtube = " + to_str(data['yt']['enabled']).encode('utf-8') + "\n")
            maxvideos = to_str(data['yt']['max']).encode('utf-8')
            if maxvideos == "":
                maxvideos = "10"
            if to_int(maxvideos) < 1:
                f.write("maxvideos = 1\n")
            elif to_int(maxvideos) > 50:
                f.write("maxvideos = 50\n")
            else:
                f.write("maxvideos = " + to_str(maxvideos) + "\n")
            f.write("ignore = " + to_str(data['yt']['ignore']).encode('utf-8') + "\n")
            f.write("\n[Notifications]\n")
            f.write("homeassistant = " + to_str(data['alerts']['homeassistant']).encode('utf-8') + "\n")
            f.write("pushbullet = " + to_str(data['alerts']['pushbullet']).encode('utf-8') + "\n")
            f.write("pushover = " + to_str(data['alerts']['pushover']).encode('utf-8') + "\n")
            f.write("\n[Crawljobs]\n")
            f.write("autostart = " + to_str(data['crawljobs']['autostart']).encode('utf-8') + "\n")
            f.write("subdir = " + to_str(data['crawljobs']['subdir']).encode('utf-8') + "\n")
        files.check()
        return "Success", 201
    else:
        return "Failed", 405
from optparse import OptionParser
import sys

parser = OptionParser()
parser.add_option('-v', '--version', action='store_true', dest='version',
                  help="get version information")
parser.add_option('-s', '--session', action='store', dest='session',
                  help="name of existing session to continue")
parser.add_option('-c', '--clients', action='store', dest='clients',
                  help="comma separated list of clients")
parser.add_option('-p', '--prevapp', action='store_true', dest='prevapp',
                  help="restart previous application")
(options, args) = parser.parse_args()

if options.version:
    import version
    print 'Leginon version: %s' % (version.getVersion(),)
    print ' Installed in: %s' % (version.getInstalledLocation(),)
    sys.exit()
# The Leginon software is Copyright 2004-2012
# The Scripps Research Institute, La Jolla, CA
# For terms of the license agreement
# see http://ami.scripps.edu/software/leginon-license
#
# $Source: /ami/sw/cvsroot/pyleginon/__init__.py,v $
# $Revision: 1.2 $
# $Name: not supported by cvs2svn $
# $Date: 2004-10-26 20:21:53 $
# $Author: suloway $
# $State: Exp $
# $Locker: $

import version

__version__ = version.getVersion()
parser = OptionParser()
parser.add_option('-v', '--version', action='store_true', dest='version',
                  help="get version information")
parser.add_option('-s', '--session', action='store', dest='session',
                  help="name of existing session to continue")
parser.add_option('-c', '--clients', action='store', dest='clients',
                  help="comma separated list of clients")
parser.add_option('-p', '--prevapp', action='store_true', dest='prevapp',
                  help="restart previous application")
(options, args) = parser.parse_args()

if options.version:
    import version
    print 'Leginon version: %s' % (version.getVersion(),)
    print ' Installed in: %s' % (version.getInstalledLocation(),)
    sys.exit()
import os

import version
from setuptools import setup, find_packages

setup(
    name="zgres",
    version=version.getVersion(),
    packages=find_packages(),
    author='Brian Sutherland',
    author_email='*****@*****.**',
    url='https://github.com/jinty/zgres',
    description="Database Connection and failover manager for PostgreSQL",
    entry_points={
        'console_scripts': [
            'zgres-show = zgres.show:show_cli',
            'zgres-apply = zgres.apply:apply_cli',
            'zgres-sync = zgres.sync:sync_cli',
            'zgres-deadman = zgres.deadman:deadman_cli',
            'zgres-deadman-exporter = zgres.prometheus:deadman_exporter',
        ],
        'zgres.sync': [
            'zgres-apply = zgres.apply:Plugin',
            'zookeeper = zgres.zookeeper:ZooKeeperSource',
            'mock-subscriber = zgres.tests:MockSyncPlugin',  # only for tests
        ],
        'zgres.deadman': [
            'apt = zgres.apt:AptPostgresqlPlugin',
            'ec2 = zgres.ec2:Ec2Plugin',
            'follow-the-leader = zgres.replication:FollowTheLeader',
            'select-furthest-ahead-replica = zgres.replication:SelectFurthestAheadReplica',
            'ec2-snapshot = zgres.ec2:Ec2SnapshotBackupPlugin',
    warnings.warn('Could not import `base_node_rpc` (expected during '
                  'install).')

sys.path.insert(0, '.')
import version

install_distutils_tasks()

DEFAULT_ARDUINO_BOARDS = []  # ['mega2560']
PROJECT_PREFIX = [d for d in path('.').dirs()
                  if d.joinpath('Arduino').isdir()
                  and d.name not in ('build', )][0].name
module_name = PROJECT_PREFIX
package_name = module_name.replace('_', '-')
rpc_module = import_module(PROJECT_PREFIX)
VERSION = version.getVersion()
URL = 'http://github.com/wheeler-microfluidics/%s.git' % package_name
PROPERTIES = OrderedDict([('package_name', package_name),
                          ('display_name', package_name),
                          ('manufacturer', 'Wheeler Lab'),
                          ('software_version', VERSION),
                          ('url', URL)])
LIB_PROPERTIES = PROPERTIES.copy()
LIB_PROPERTIES.update(OrderedDict([('author', 'Christian Fobel'),
                                   ('author_email', '*****@*****.**'),
                                   ('short_description', 'Template project '
                                    'demonstrating use of Arduino base node '
                                    'RPC framework.'),
                                   ('version', VERSION),
                                   ('long_description', ''),
                                   ('category', 'Communication'),
                                   ('architectures', 'avr')]))
from twisted.runner import procmon

from twext.web2.server import Site
from twext.python.log import Logger, LoggingMixIn
from twext.python.log import logLevelForNamespace, setLogLevelForNamespace
from twext.internet.ssl import ChainingOpenSSLContextFactory
from twext.internet.tcp import MaxAcceptTCPServer, MaxAcceptSSLServer
from twext.web2.channel.http import LimitingHTTPFactory, SSLRedirectRequest

try:
    from twistedcaldav.version import version
except ImportError:
    sys.path.insert(0, os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "support"))
    from version import version as getVersion
    version = "%s (%s)" % getVersion()

from twistedcaldav.config import ConfigurationError
from twistedcaldav.config import config
from twistedcaldav.directory.principal import DirectoryPrincipalProvisioningResource
from twistedcaldav.directory import calendaruserproxy
from twistedcaldav.directory.calendaruserproxyloader import XMLCalendarUserProxyLoader
from twistedcaldav.localization import processLocalizationFiles
from twistedcaldav.mail import IMIPReplyInboxResource
from twistedcaldav.static import CalendarHomeProvisioningFile
from twistedcaldav.static import IScheduleInboxFile
from twistedcaldav.static import TimezoneServiceFile
from twistedcaldav.stdconfig import DEFAULT_CONFIG, DEFAULT_CONFIG_FILE
from twistedcaldav.upgrade import upgradeData

from twext.web2.metafd import ConnectionLimiter, ReportingHTTPService
                    action="store_true",
                    help="Disable warnings.")

if __name__ == '__main__':
    """
    Some configuration for the argument parser, like checking the required
    arguments: --fasta1, --fasta2 and --tabinput. We set up the output file
    name, create the output directory (default: 'result/'), set up the
    tempfile or plain file depending on the --notempfile option, and flag
    useless arguments (e.g. --type if the format of tabinput isn't GFF).
    """
    args = parser.parse_args()

    if len(sys.argv) < 2:
        getVersion()
        parser.print_help()
        sys.exit(1)
    if args.update:
        updateTag()
        sys.exit(1)
    if args.version:
        getVersion()
        sys.exit(1)
    if args.warn:
        warnings.filterwarnings("ignore")
    if args.fasta1 == None:
        sys.exit("ERROR : Argument --fasta1 (-f1) is missing.")
    if args.fasta2 == None:
        sys.exit("ERROR : Argument --fasta2 (-f2) is missing.")
import sys

sys.path.insert(0, '.')
import version

open('RELEASE-VERSION', 'wb').write(version.getVersion())
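# The snippets in this collection all assume a sibling `version.py` exposing
# `getVersion()`. The following is only a minimal sketch of one common pattern
# (derive the version from `git describe`, falling back to the RELEASE-VERSION
# file written by the snippet above); it is an assumption for illustration, not
# the implementation any of these projects actually ships, and the helper name
# `_git_version` is hypothetical.
import subprocess


def _git_version():
    # Ask git for the nearest tag plus commit info; return None outside a repo
    # or when git is unavailable.
    try:
        out = subprocess.check_output(
            ['git', 'describe', '--tags', '--always', '--dirty'],
            stderr=subprocess.STDOUT)
        return out.strip().decode('utf-8').lstrip('v')
    except (OSError, subprocess.CalledProcessError):
        return None


def getVersion():
    version = _git_version()
    if version is None:
        # Fall back to the version cached at release time.
        try:
            with open('RELEASE-VERSION') as f:
                version = f.read().strip()
        except IOError:
            version = 'unknown'
    return version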
    warnings.warn('Could not import `base_node_rpc` (expected during '
                  'install).')

sys.path.insert(0, '.')
import version

install_distutils_tasks()

DEFAULT_ARDUINO_BOARDS = ['uno']
PROJECT_PREFIX = [d for d in path('.').dirs()
                  if d.joinpath('Arduino').isdir()
                  and d.name not in ('build', )][0].name
module_name = PROJECT_PREFIX
package_name = module_name.replace('_', '-')
rpc_module = import_module(PROJECT_PREFIX)
VERSION = version.getVersion()
URL = 'http://github.com/wheeler-microfluidics/%s.git' % package_name
PROPERTIES = OrderedDict([('package_name', package_name),
                          ('display_name', package_name),
                          ('manufacturer', 'Wheeler Lab'),
                          ('software_version', VERSION),
                          ('url', URL)])
LIB_PROPERTIES = PROPERTIES.copy()
LIB_PROPERTIES.update(OrderedDict([('author', 'Christian Fobel'),
                                   ('author_email', '*****@*****.**'),
                                   ('short_description', 'Arduino-based pulse '
                                    'counting firmware and Python driver.'),
                                   ('version', VERSION),
                                   ('long_description', ''),
                                   ('category', 'Communication'),
                                   ('architectures', 'avr')]))
def __init__(self, parent):
    sz = int(config.options['window']['width']), int(config.options['window']['height'])
    wx.Frame.__init__(self, parent, -1,
                      "dcLord (%s): Divide & Conquer client (www.the-game.ru)" % (version.getVersion(),),
                      style=wx.DEFAULT_FRAME_STYLE | wx.NO_FULL_REPAINT_ON_RESIZE,
                      size=sz)

    if int(config.options['window']['is_maximized']) == 1:
        self.Maximize()

    #import_raw.processAllUnpacked()
    #self.map.turn = db.db.max_turn

    self.log_dlg = wx.TextCtrl(self, 1, style=wx.TE_MULTILINE)
    self.log_dlg.Disable()
    self.log_dlg.SetBackgroundColour('WHITE')

    serialization.load(ev_cb=self)

    self.info_panel = planet_window.InfoPanel(self)
    self.object_filter = object_filter.FilterPanel(self)
    self.planet_filter = object_filter.FilterFrame(self)
    #self.unit_list = unit_list.UnitPrototypeListWindow(self, 0)
    self.history = history.HistoryPanel(self)
    #self.area_list = area_panel.AreaListWindow(self)

    self.sync_path = config.options['data']['sync_path']

    self.info_panel.turn = db.getTurn()
    print 'db max turn is %s' % (db.getTurn(),)

    self.map = map.Map(self)
    self.map.turn = db.getTurn()
    self.map.set_planet_filter(self.planet_filter)
    print 'map turn is set to %s' % (self.map.turn,)
    self.map.update()

    self.started = False
    self.actions_queue = []
    self.pf = None

    if self.map.turn != 0:
        self.log('loaded data for turn %d' % (self.map.turn,))

    self.pending_actions = request.RequestMaker()

    self._mgr = wx.aui.AuiManager(self)
    self.command_selected_user = False

    info = wx.aui.AuiPaneInfo()
    info.CenterPane()
    info.Fixed()
    info.DefaultPane()
    info.Resizable(True)
    info.CaptionVisible(False)

    self._mgr.AddPane(self.map, info)
    self._mgr.AddPane(self.history, wx.RIGHT, "Turn")
    self._mgr.AddPane(self.info_panel, wx.RIGHT, "Info")
    self._mgr.AddPane(self.planet_filter, wx.LEFT, "Planets")
    self._mgr.AddPane(self.object_filter, wx.LEFT, "Filter")
    #self._mgr.AddPane(self.unit_list, wx.RIGHT, "Units")
    self._mgr.AddPane(self.log_dlg, wx.BOTTOM, "Log")
    #self._mgr.AddPane(self.area_list, wx.RIGHT, "Areas")

    #self.map.set_planet_fileter(self.planet_filter)
    self._mgr.Update()

    #TODO: load from data
    self.manual_control_units = set()  # unit id
    self.manual_control_units.add(7906)
    self.manual_control_units.add(7291)  # probes over Othes planets

    #TODO: load from file
    self.exclude_fleet_names = []  # busy, taken, etc...

    #p = config.options['window']['pane-info']
    #if p:
    #    print 'load p %s'%(p,)
    #    self._mgr.LoadPerspective( p )

    self.recv_data_callback = {}
    self.makeMenu()

    self.Bind(event.EVT_DATA_DOWNLOAD, self.onDownloadRawData)
    self.Bind(event.EVT_MAP_UPDATE, self.onMapUpdate)
    self.Bind(event.EVT_USER_SELECT, self.onSelectUser)
    self.Bind(event.EVT_ACTIONS_REPLY, self.onActionsReply)
    self.Bind(event.EVT_SELECT_OBJECT, self.info_panel.selectObject)
    self.Bind(event.EVT_TURN_SELECTED, self.onTurnSelected)
    self.Bind(event.EVT_LOG_APPEND, self.onLog)

    #import_raw.processAllUnpacked()
    #serialization.save()

    #todo - restore previous state
    #self.Maximize()

    self.history.updateTurns(self.map.turn)
    install_requires += ['pycairo-gtk2-win', 'pywin32']
else:
    try:
        import gtk
    except ImportError:
        print >> sys.stderr, ("Please install Python bindings for Gtk 2 using "
                              "your system's package manager.")
    try:
        import cairo
    except ImportError:
        print >> sys.stderr, ("Please install Python bindings for cairo using "
                              "your system's package manager.")

setup(name='microdrop',
      version=version.getVersion(),
      description='MicroDrop is a graphical user interface for the DropBot '
                  'Digital Microfluidics control system',
      keywords='digital microfluidics dmf automation dropbot microdrop',
      author='Ryan Fobel and Christian Fobel',
      author_email='[email protected] and [email protected]',
      url='http://microfluidics.utoronto.ca/microdrop',
      license='GPL',
      long_description='\n%s\n' % open('README.md', 'rt').read(),
      packages=['microdrop'],
      include_package_data=True,
      install_requires=install_requires,
      entry_points={'console_scripts': ['microdrop = microdrop.microdrop:main']})
#!/usr/bin/env python
import distutils.core

try:
    from distutils.command.build_py import build_py_2to3 as build_py
except ImportError:
    from distutils.command.build_py import build_py

import version

# Setup script for path
kw = {
    'name': 'path_helpers',
    'version': version.getVersion(),
    'description': 'Helper class and functions for working with file path',
    'author': 'Christian Fobel',
    'author_email': '*****@*****.**',
    'url': 'http://github.com/cfobel/path_helpers',
    'license': 'MIT License',
    'packages': ['path_helpers'],
    'cmdclass': dict(build_py=build_py),
}

# If we're running Python 2.3, add extra information
if hasattr(distutils.core, 'setup_keywords'):
    if 'classifiers' in distutils.core.setup_keywords:
        kw['classifiers'] = [
            'Development Status :: 5 - Production/Stable',
            'License :: OSI Approved :: MIT License',
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'microdrop'
copyright = u'2016, Christian Fobel and Ryan Fobel'
author = u'Christian Fobel and Ryan Fobel'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = version.getVersion()
# The short X.Y version.
version = u'.'.join(release.split('.')[:2])

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
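# Quick check of the short-version computation used in the conf.py above:
# keep only the first two dot-separated components of the full release string.
# The release value here is a made-up example, not one reported by microdrop.
release = u'1.0.16'
version = u'.'.join(release.split('.')[:2])
assert version == u'1.0'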
#!/usr/bin/python
from copy import deepcopy
import os
import re
import pprint
import sys

from version import getVersion

POCKETNC_DIRECTORY = "/home/pocketnc/pocketnc"
VERSION = getVersion()

sys.path.insert(0, os.path.join(POCKETNC_DIRECTORY, "Rockhopper"))
from ini import read_ini_data, merge_ini_data, write_ini_data

INI_FILE = os.path.join(POCKETNC_DIRECTORY, "Settings/PocketNC.ini")
INI_DEFAULT_FILE = os.path.join(POCKETNC_DIRECTORY, "Settings/versions/%s/PocketNC.ini" % VERSION)
CALIBRATION_OVERLAY_FILE = os.path.join(POCKETNC_DIRECTORY, "Settings/CalibrationOverlay.inc")

if __name__ == "__main__":
    defaults = read_ini_data(INI_DEFAULT_FILE)

    if os.path.isfile(CALIBRATION_OVERLAY_FILE):
        overlay = read_ini_data(CALIBRATION_OVERLAY_FILE)
    else:
        overlay = {'parameters': [], 'sections': {}}

    merged = merge_ini_data(defaults, overlay)