def __init__(self, serviceUrl):
    """Initialize the job-runner thread.

    :param serviceUrl: base URL of the loopback geqe REST service,
        e.g. ``http://localhost:5500``; kept on ``self.url`` and used
        to build the ``GeqeAPI.GeqeRestHelper`` client.
    """
    super(JobRunner, self).__init__()
    # Remember the endpoint and open a REST helper against it.
    self.url = serviceUrl
    self.service = GeqeAPI.GeqeRestHelper(serviceUrl)
} ] } """ if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument("serviceUrl", help="loop back data service url ie http://localhost:5500") parser.add_argument("inputFile", help="polygon file to upload") parser.add_argument("--name", help="name of the dataset") parser.add_argument("--username", help="name of the user account.") args = parser.parse_args() dataConnector = GeqeAPI.GeqeRestHelper(args.serviceUrl) if args.name is None or args.username is None: print 'name and username are required' parser.print_help() sys.exit(1) with open(args.inputFile) as handle: data = json.loads(handle.read()) data['name'] = args.name data['username'] = args.username for site in data['sites']: if 'dates' in site and len(site['dates']) > 0: for daterange in site['dates']:
]) command.extend([self.service.serviceURL, job['id']]) command = map(str, command) with open('lastcommand.sh', 'w') as handle: handle.write(' '.join(command)) result = subprocess.call(command, stdout=stdoutFile, stderr=stderrFile) print 'result: ', str(result) stderrFile.close() stdoutFile.close() return int(result) == 0 if __name__ == '__main__': global CLUSTER_STATUS service = GeqeAPI.GeqeRestHelper(conf.LOOPBACK_SERVICE) (response, clusterStatus) = service.putStatus({ 'host': platform.node(), 'status': 'RUNNING' }) if response != 200: print 'response: ', response print clusterStatus raise Exception("Could not save cluster status.") thread = JobRunner(conf.LOOPBACK_SERVICE) thread.setDaemon(True) try: thread.start() while thread.isAlive():