/
runner2.py
157 lines (138 loc) · 4.45 KB
/
runner2.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
File: runner2.py
Author: SpaceLis
Changes:
0.0.1 The first version
Description:
Running experiments by giving a delta configuration
"""
__version__ = '0.0.1'
import sys
import os
import traceback
import json
import logging
import logging.config
import argparse
from StringIO import StringIO
from multiprocessing import cpu_count
from multiprocessing import RLock
from multiprocessing import Pool
from twhere.exprmodels import experiment
from twhere.config import Configuration
# Try to pin this process to every available CPU core; `affinity` is an
# optional third-party module and failure here is non-fatal (we only warn,
# since logging is not configured yet at import time).
try:
    import affinity
    affinity.set_process_affinity_mask(os.getpid(), (1 << cpu_count()) - 1)
except Exception:  # pylint: disable-msg=W0702
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt pass.
    sys.stderr.write('WARN: Fail on setting CPU affinity, check cpu loads!\n')

# Mapping from short city names to the dataset identifiers used by the
# experiment configuration (`expr.city.id`).
CITY = dict(zip(['NY', 'CH', 'LA', 'SF'],
                ['27485069891a7938',
                 '1d9a5370a355ab0c',
                 '3b77caf94bfc81fe',
                 '5a110d312052166f']))

# dictConfig-style logging setup: one INFO-level handler writing to stderr.
# The root level may be overridden by --loglevel in the __main__ section.
LOGGING_CONF = {'version': 1,
                'formatters': {
                    'simple': {'format':
                               "%(asctime)s %(process)d %(name)s "
                               "[%(levelname)s] %(message)s"}
                },
                'handlers': {
                    'console': {'class': 'logging.StreamHandler',
                                'level': 'INFO',
                                'formatter': 'simple',
                                'stream': 'ext://sys.stderr'}
                },
                'root': {
                    'level': 'INFO',
                    'handlers': ['console', ]
                }
                }

# Serializes console output coming from pooled worker processes.
OUTPUT_LOCK = RLock()
def worker(lconf):
    """ A worker function for wrapping prepare_and_run() with
    CPU affinity assignment.

    Args:
        lconf: a delta-configuration mapping passed to prepare_and_run().
    Raises:
        Re-raises any exception from prepare_and_run() after logging it,
        so the pool sees the failure.
    """
    try:
        prepare_and_run(lconf)
        with OUTPUT_LOCK:
            # Single-expression print works identically on Python 2 and 3.
            print('[SUCCEEDED] %s' % (lconf,))
    except Exception:
        # format_exc() replaces the manual StringIO + print_exc dance.
        logging.error('Uncaught exception in worker process:\n%s',
                      traceback.format_exc())
        # Bare `raise` preserves the original traceback; `raise e` discarded
        # it and re-raised from this frame.
        raise
def pooling(lconf, poolsize=10):
    """ Running the list of conf in a multiprocess pool.

    Args:
        lconf: iterable of delta-configuration mappings, one per experiment.
        poolsize: number of worker processes (default 10).
    """
    pool = Pool(poolsize)
    try:
        pool.map(worker, lconf)
    finally:
        # The original leaked worker processes: a Pool must be closed and
        # joined so its children terminate deterministically.
        pool.close()
        pool.join()
def prepare_and_run(deltaconf):
    """ Build a full configuration from the given delta and run the
    experiment on it.
    """
    cfg = Configuration()
    cfg.update(deltaconf)
    # When only a city name is supplied, resolve its dataset id from CITY.
    if cfg['expr.city.id'] is None:
        city_name = cfg['expr.city.name']
        cfg['expr.city.id'] = CITY[city_name]
    experiment(cfg)
def setup_logging(logconf):
    """ Apply the given dictConfig-style mapping to the logging system. """
    logging.config.dictConfig(logconf)
def parse_parameter():
    """ Build the command-line parser and return the parsed arguments.

    Returns an argparse namespace with attributes: conffile, confstr,
    pooled and log_level.
    """
    parser = argparse.ArgumentParser(description='Running Trail Prediction')
    parser.add_argument('-f', dest='conffile', action='store', metavar='FILE',
                        default=None,
                        help='Running with delta configuration from the FILE')
    parser.add_argument('-s', dest='confstr', action='store', metavar='JSON',
                        default=None,
                        help='Running with the delta configuration from the '
                             'json string')
    parser.add_argument('-p', dest='pooled', action='store', nargs=2,
                        metavar=('POOLSIZE', 'FILE'), default=None,
                        help='Running a list of configuration in a '
                             'multiprocess pool')
    parser.add_argument('--loglevel', dest='log_level', action='store',
                        metavar='INFO', default='INFO',
                        help='The level of log output')
    return parser.parse_args()
if __name__ == '__main__':
    appargs = parse_parameter()
    # Only honour --loglevel when it names a real logging level.
    if appargs.log_level in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
        LOGGING_CONF['root']['level'] = appargs.log_level
    setup_logging(LOGGING_CONF)
    LOGGER = logging.getLogger(__name__)
    LOGGER.debug('DEBUG is enabled')
    try:
        import resource
        # Cap the address space at 1.5 GiB; the py2-only `L` suffix is
        # unnecessary (ints auto-promote), so plain ints work on 2 and 3.
        resource.setrlimit(resource.RLIMIT_AS, (1500 * 1024 * 1024, -1))
    except ValueError as err:
        # `err.message` is deprecated (and gone in py3); format err itself.
        LOGGER.warn('Failed set resource limits. Because {0}'.
                    format(err))
    if appargs.pooled is not None:
        # -p POOLSIZE FILE: one JSON delta configuration per line.
        with open(appargs.pooled[1]) as fconf:
            pooling([json.loads(l) for l in fconf], int(appargs.pooled[0]))
        sys.exit(0)
    dconf = None
    if appargs.conffile is not None:
        with open(appargs.conffile) as fconf:
            dconf = json.loads(fconf.read())
    # -s overrides -f, preserving the original precedence.
    if appargs.confstr is not None:
        dconf = json.loads(appargs.confstr)
    if dconf is None:
        # Original code crashed with NameError here when no option was given.
        LOGGER.error('No configuration given, use -f, -s or -p')
        sys.exit(1)
    prepare_and_run(dconf)