# Copyright (C) 2010 Ion Torrent Systems, Inc. All Rights Reserved
"""
Tasks
=====
The ``tasks`` module contains all the Python functions which spawn Celery
tasks in the background.
Not all functions contained in ``tasks`` are actual Celery tasks, only those
that have the ``@app.task`` decorator.
"""
from __future__ import division, absolute_import
from celery import task, group, chord
from celery.task import periodic_task
from celery.utils.log import get_task_logger
from celery.schedules import crontab
from celery.exceptions import SoftTimeLimitExceeded
from django.core import mail
from django.contrib.auth.models import User
from iondb.celery import app
import urllib2
import os
import signal
import string
import random
import subprocess
import shutil
import socket
from django.conf import settings
from django.utils import timezone
import zipfile
import os.path
import sys
import re
import json
import logging
from datetime import timedelta, datetime
import pytz
import time
import tempfile
import urllib
import traceback
import requests
import feedparser
import dateutil
import urlparse
from ion.utils.timeout import timeout
from iondb.utils import raid as raid_utils
from iondb.utils import files as file_utils
logger = get_task_logger(__name__)
def call(*cmd, **kwargs):
    """Run *cmd* as a subprocess and wait for it to finish.

    stdout/stderr are captured via pipes unless the caller overrides them.
    Returns a (returncode, stdout, stderr) tuple.
    """
    kwargs.setdefault("stdout", subprocess.PIPE)
    kwargs.setdefault("stderr", subprocess.PIPE)
    child = subprocess.Popen(cmd, **kwargs)
    out, err = child.communicate()
    return child.returncode, out, err
def run_as_daemon(callback, *args, **kwargs):
    """Disk And Execution MONitor (Daemon)
    Fork off a completely separate process and run callback from that process.

    Classic UNIX double-fork: the grandchild is fully detached (own session,
    no controlling terminal, all inherited fds closed, stdio on /dev/null).
    The parent returns immediately; the grandchild never returns here --
    it runs callback(*args, **kwargs) and exits.
    """
    # fork the first time (to make a non-session-leader child process)
    try:
        pid = os.fork()
    except OSError, e:
        raise RuntimeError("1st fork failed: %s [%d]" % (e.strerror, e.errno))
    if pid != 0:
        # parent (calling) process is all done
        return
    # detach from controlling terminal (to make child a session-leader)
    os.setsid()
    # fork again so the final process is NOT a session leader and can
    # never re-acquire a controlling terminal
    try:
        pid = os.fork()
    except OSError, e:
        raise RuntimeError("2nd fork failed: %s [%d]" % (e.strerror, e.errno))
    if pid != 0:
        # child process is all done
        os._exit(0)
    # grandchild process now non-session-leader, detached from parent
    # grandchild process must now close all open files
    try:
        maxfd = os.sysconf("SC_OPEN_MAX")
    except (AttributeError, ValueError):
        maxfd = 1024
    for fd in range(maxfd):
        try:
            os.close(fd)
        except OSError: # ERROR, fd wasn't open to begin with (ignored)
            pass
    # redirect stdin, stdout and stderr to /dev/null
    # (open() reuses fd 0 since all fds were just closed)
    os.open(os.devnull, os.O_RDWR) # standard input (0)
    os.dup2(0, 1)
    os.dup2(0, 2)
    # Run our callback function with it's arguments
    callback(*args, **kwargs)
    sys.exit()
# ZipFile doesn't provide a context manager until 2.7/3.2
if hasattr(zipfile.ZipFile, '__exit__'):
    # Modern Python: ZipFile already supports the `with` statement.
    ZipContextManager = zipfile.ZipFile
else:
    class ZipContextManager():
        """Thin wrapper adding with-statement support to zipfile.ZipFile."""
        def __init__(self, *args, **kwargs):
            self.zobj = zipfile.ZipFile(*args, **kwargs)
        def __enter__(self):
            return self.zobj
        def __exit__(self, exc_type, exc_value, exc_tb):
            self.zobj.close()
# Unified unzip function
def extract_zip(archive, dest, prefix=None, auto_prefix=False, logger=None):
    """ unzip files in archive to destination folder
    extracting only files in prefix and omitting prefix from output path.

    Returns (prefix, extracted_file_paths).
    Skips symlinks, __MACOSX metadata and any member whose normalized path
    would escape dest (zip-slip protection).
    Raises OSError if dest exists and cannot be reused.
    """
    if not logger:
        logger = logging.getLogger(__name__)
    # Normalize, clear out or create dest path
    dest = os.path.normpath(dest)
    if os.path.exists(dest):
        if not os.path.isdir(dest):
            # BUGFIX: message was passed as a printf-style arg tuple and never formatted
            raise OSError("Must extract zip file to a directory. File already exists: '%s'" % dest)
        if dest.find(settings.PLUGIN_PATH) == 0:
            ## Only delete content under PLUGIN_PATH.
            delete_that_folder(dest, "Deleting content at destination path '%s'" % dest)
        else:
            raise OSError("Unable to extract ZIP - directory '%s' already exists" % dest)
    os.makedirs(dest, 0o777)
    logger.info("Extracting ZIP '%s' to '%s'", archive, dest)
    try:
        import pwd, grp
        uid = pwd.getpwnam('ionadmin')[2]
        gid = grp.getgrnam('ionadmin')[2]
    except (KeyError, OSError):
        # BUGFIX: pwd/grp raise KeyError when the user/group is missing;
        # the old `except OSError` never caught it. Fall back to current ids.
        uid = os.getuid()
        gid = os.getgid()
    extracted_files = []
    with ZipContextManager(archive, 'r') as zfobj:
        ## prefix is a string to extract from zipfile
        offset = 0
        if auto_prefix and not prefix:
            prefix, _ = file_utils.get_common_prefix(zfobj.namelist())
        if prefix is not None:
            offset = len(prefix) + 1
            logger.debug("ZIP extract prefix '%s'", prefix)
        for member in zfobj.infolist():
            # strip a leading slash so absolute member names stay under dest
            if member.filename[0] == '/':
                filename = member.filename[1:]
            else:
                filename = member.filename
            if prefix:
                if filename.startswith(prefix):
                    logger.debug("Extracting '%s' as '%s'", filename, filename[offset:])
                else:
                    logger.debug("Skipping file outside '%s' prefix: '%s'", filename, prefix)
                    continue
            targetpath = os.path.normpath(os.path.join(dest, filename))
            # Catch files we can't handle properly.
            if targetpath.find(dest) != 0:
                ## Path is no longer under dest after normalization. Prevent extraction (eg. ../../../etc/passwd)
                logger.error("ZIP archive contains file '%s' outside destination path: '%s'. Skipping.", filename, dest)
                continue
            # ZIP archives can have symlinks. Nope.
            # Unix mode bits live in the HIGH 16 bits of external_attr.
            # BUGFIX: the original shifted LEFT, making this test always false,
            # so symlinks were never actually skipped.
            if ((member.external_attr >> 16) & 0o170000) == 0o120000:
                logger.error("ZIP archive contains symlink: '%s'. Skipping.", member.filename)
                continue
            if "__MACOSX" in filename:
                # BUGFIX: message had an extra arg but no placeholder
                logger.warn("ZIP archive contains __MACOSX meta folder. Skipping '%s'", member.filename)
                continue
            # Get permission set inside archive
            perm = ((member.external_attr >> 16) & 0o777) or 0o755
            # Create all upper directories if necessary.
            upperdirs = os.path.dirname(targetpath)
            if upperdirs and not os.path.exists(upperdirs):
                logger.debug("Creating tree for '%s'", upperdirs)
                os.makedirs(upperdirs, perm | 0o555)
            if filename[-1] == '/':
                # upper bits of external_attr should be 04 for folders... ignoring this for now
                if not os.path.isdir(targetpath):
                    logger.debug("ZIP extract dir: '%s'", targetpath)
                    os.mkdir(targetpath, perm | 0o555)
                continue
            try:
                with os.fdopen(os.open(targetpath, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, perm), 'wb') as targetfh:
                    zipfh = zfobj.open(member)
                    shutil.copyfileobj(zipfh, targetfh)
                    zipfh.close()
                logger.debug("ZIP extract file: '%s' to '%s'", filename, targetpath)
            except (OSError, IOError):
                logger.exception("Failed to extract '%s':'%s' to '%s'", archive, filename, targetpath)
                continue
            # Give the extracted entry to ionadmin (best effort).
            try:
                os.chown(targetpath, uid, gid)
            except (OSError, IOError) as e:
                # Non fatal if time and owner fail.
                logger.warn("Failed to set time/owner attributes on '%s': %s", targetpath, e)
            extracted_files.append(targetpath)
    return (prefix, extracted_files)
def unzipPlugin(zipfile, logger=None):
    """Extract a plugin ZIP into a scratch folder, discover the plugin's real
    name by loading its script, then move it into its final install location.

    Returns a dict with keys: plugin (name), path (install dir), files.
    NOTE: the parameter `zipfile` shadows the imported zipfile module here.
    """
    if not logger:
        logger = logging.getLogger(__name__)
    ## Extract plugin to scratch folder. When complete, move to final location.
    plugin_path, ext = os.path.splitext(zipfile)
    plugin_name = os.path.basename(plugin_path)
    # ZIP file must named with plugin name - fragile
    # FIXME - handle (1) additions (common for multiple downloads via browser)
    # FIXME - handle version string in ZIP archive name
    scratch_path = os.path.join(settings.PLUGIN_PATH,"scratch","install-temp",plugin_name)
    (prefix, files) = extract_zip(zipfile, scratch_path, auto_prefix=True, logger=logger)
    if prefix:
        plugin_name = os.path.basename(prefix)
    # NOTE(review): if the archive had no common prefix this joins with
    # prefix=None -- confirm extract_zip always returns a prefix here.
    plugin_temp_home = os.path.join(scratch_path, prefix)
    try:
        # Convert script into PluginClass, get info by introspection
        from iondb.plugins.manager import pluginmanager
        script, islaunch = pluginmanager.find_pluginscript(plugin_temp_home, plugin_name)
        logger.debug("Got script: %s", script)
        from ion.plugin.loader import cache as plugincache
        ret = plugincache.load_module(plugin_name, script)
        cls = plugincache.get_plugin(plugin_name)
        p = cls()
        final_name = p.name # what the plugin calls itself, regardless of ZIP file name
        logger.info("Plugin calls itself: '%s'", final_name)
    except:
        # Introspection is best-effort: fall back to the ZIP-derived name.
        logger.exception("Unable to interrogate plugin name from: '%s'", zipfile)
        final_name = plugin_name
    #move to the plugin dir
    # New extract_zip removes prefix from extracted files.
    # But still writes to file_name
    try:
        final_install_dir = os.path.join(settings.PLUGIN_PATH, final_name)
        if os.path.exists(final_install_dir) and (final_install_dir != settings.PLUGIN_PATH):
            logger.info("Deleting old copy of plugin at '%s'", final_install_dir)
            delete_that_folder(final_install_dir, "Error Deleting old copy of plugin at '%s'" % final_install_dir)
        parent_folder = os.path.dirname(final_install_dir)
        if not os.path.exists(parent_folder):
            logger.info("Creating path for plugin '%s' for '%s'", parent_folder, final_install_dir)
            os.makedirs(parent_folder, 0555)
        logger.info("Moving plugin from temp extract folder '%s' to final location: '%s'", plugin_temp_home, final_install_dir)
        shutil.move(plugin_temp_home, final_install_dir)
        delete_that_folder(scratch_path, "Deleting plugin install scratch folder")
        # Moving files reverts owner to root, change back to ionadmin
        fix_plugin_owner(final_install_dir)
    except (IOError, OSError):
        logger.exception("Failed to move plugin from temp extract folder '%s' to final location: '%s'", plugin_temp_home, final_install_dir)
        raise
    # Now that it has been downloaded,
    # convert pre-plugin into real db plugin object
    try:
        from iondb.plugins.manager import pluginmanager
        (new_plugin, updated) = pluginmanager.install(final_name, final_install_dir)
    except ValueError:
        logger.exception("Failed to install plugin")
        #delete_that_folder(final_install_dir)
    return {
        "plugin": final_name,
        "path": final_install_dir,
        "files": files,
    }
@app.task
def echo(message, wait=0.0):
    """Debug task: pause *wait* seconds, then log and print *message*."""
    time.sleep(wait)
    logger.info("Logged: " + message)
    print(message)
@app.task
def delete_that_folder(directory, message):
    """Recursively delete *directory*; log *message* for each path that
    fails to delete instead of raising."""
    def on_rmtree_error(func, path, exc_info):
        # Called by rmtree for each failed removal; keep going.
        logger.error("Failed to delete %s: %s", path, message)
    if os.path.exists(directory):
        logger.info("Deleting %s", directory)
        shutil.rmtree(directory, onerror=on_rmtree_error)
#N.B. Run as celery task because celery runs with root permissions
@app.task
def removeDirContents(folder_path):
    """Empty *folder_path* in place: unlink files and symlinks, recursively
    remove subdirectories. The folder itself is kept."""
    for entry in os.listdir(folder_path):
        entry_path = os.path.join(folder_path, entry)
        if os.path.isfile(entry_path) or os.path.islink(entry_path):
            os.unlink(entry_path)
        else:
            shutil.rmtree(entry_path)
def downloadChunks(url):
    """Helper to download large files.

    Streams *url* into a uniquely-named temp directory under settings.TEMP_PATH.
    Returns (local_path, final_url_after_redirects) on success, or False on
    any error (the temp directory is removed on failure).
    """
    baseFile = os.path.basename(url)
    uuid_path = ''.join([random.choice(string.letters + string.digits) for i in range(10)])
    #move the file to a more uniq path
    os.umask(0o002)
    temp_path_uniq = os.path.join(settings.TEMP_PATH, uuid_path)
    os.mkdir(temp_path_uniq)
    try:
        # renamed from `file`, which shadowed the builtin
        local_file = os.path.join(temp_path_uniq, baseFile)
        req = urllib2.urlopen(url)
        # Copy in large chunks to keep memory bounded.
        # (the old Content-Length/total_size bookkeeping was never used)
        CHUNK = 256 * 10240
        with open(local_file, 'wb') as fp:
            shutil.copyfileobj(req, fp, CHUNK)
        url = req.geturl()  # final URL after any redirects
    except urllib2.HTTPError as e:
        logger.error("HTTP Error: %d '%s'", e.code, url)
        delete_that_folder(temp_path_uniq, "after download error")
        return False
    except urllib2.URLError as e:
        logger.error("URL Error: %s '%s'", e.reason, url)
        delete_that_folder(temp_path_uniq, "after download error")
        return False
    except Exception:
        logger.exception("Other error downloading from '%s'", url)
        delete_that_folder(temp_path_uniq, "after download error")
        return False
    return local_file, url
@app.task
def downloadGenome(url, genomeID):
    """download a genome, and update the genome model"""
    # NOTE(review): only the download happens here -- genomeID is unused and
    # no genome model update is performed despite the docstring; confirm intent.
    downloadChunks(url)
from . import zeroinstallHelper
# Helper for downloadPlugin task
def downloadPluginZeroInstall(url, plugin, logger=None):
    """ To be called for zeroinstall xml feed urls.
    Returns plugin prototype, not full plugin model object.

    Downloads and caches the 0install feed, then fills in the prototype's
    url/name/path/status fields. Returns False on any download failure.
    """
    try:
        downloaded = zeroinstallHelper.downloadZeroFeed(url)
        feedName = zeroinstallHelper.getFeedName(url)
    except:
        logger.exception("Failed to fetch zeroinstall feed")
        plugin.status["installStatus"] = "failed"
        plugin.status["result"] = str(sys.exc_info()[1][0])
        return False
    # The url field stores the zeroinstall feed url
    plugin.url = url
    plugin.name = feedName.replace(" ","")
    if not downloaded:
        logger.error("Failed to download url: '%s'", url)
        plugin.status["installStatus"] = "failed"
        plugin.status["result"] = "processed"
        return False
    plugin.status["installStatus"] = "installed"
    # Find plugin in subdirectory of extracted and installed path
    for d in os.listdir(downloaded):
        # Skip MACOSX attribute zip artifact
        if d == '__MACOSX':
            continue
        nestedpath = os.path.join(downloaded, d)
        if not os.path.isdir(nestedpath):
            continue
        # only take subdirectory with launch.sh script
        if os.path.exists(os.path.join(nestedpath, 'launch.sh')):
            plugin.path = os.path.normpath(nestedpath)
            break
        # or a <PluginName>.py script
        if os.path.exists(os.path.join(nestedpath, plugin.name + '.py')):
            plugin.path = os.path.normpath(nestedpath)
            break
    else:
        # for/else: no break hit above.
        # Plugin expanded without top level folder
        plugin.path = downloaded
    # assert launch.sh exists?
    # Extraction can leave files owned by root, change back to ionadmin
    fix_plugin_owner(plugin.path)
    plugin.status["result"] = "0install"
    # Other fields we can get from zeroinstall feed?
    logger.debug(plugin)
    # Version is parsed during install - from launch.sh, ignoring feed value
    return plugin
# Helper for downloadPlugin task
def downloadPluginArchive(url, plugin, logger=None):
    """Download a plugin ZIP archive from *url*, extract and stage it.

    Updates the *plugin* prototype in place (name, path, status).
    Returns True once the archive was downloaded and processed,
    False if the download itself failed.
    """
    ret = downloadChunks(url)
    if not ret:
        plugin.status["installStatus"] = "failed"
        plugin.status["result"] = "failed to download '%s'" % url
        return False
    downloaded, url = ret
    pdata = unzipPlugin(downloaded, logger=logger)
    plugin.name = pdata['plugin'] or os.path.splitext(os.path.basename(url))[0]
    plugin.path = pdata['path'] or os.path.join(settings.PLUGIN_PATH, plugin.name)
    #clean up archive file and temp dir (archive should be only file in dir)
    os.unlink(downloaded)
    os.rmdir(os.path.dirname(downloaded))
    # BUGFIX: the original tested an undefined name `unzipStatus` here, which
    # always raised NameError. Report based on the unzip result instead.
    if pdata:
        plugin.status["result"] = "unzipped"
    else:
        plugin.status["result"] = "failed to unzip"
    return True
def fix_plugin_owner(target_path):
    """Recursively chown every file under *target_path* to ionadmin.

    Best effort: individual chown failures are logged and skipped.
    """
    # Fix ownership - zeroinstall can leave files owned by root
    try:
        import pwd, grp
        uid = pwd.getpwnam('ionadmin')[2]
        gid = grp.getgrnam('ionadmin')[2]
    except (KeyError, OSError):
        # BUGFIX: pwd/grp raise KeyError when the user/group does not exist;
        # the old `except OSError` let that propagate. Fall back to current ids.
        uid = os.getuid()
        gid = os.getgid()
    logger.info("Changing plugin at '%s' to owner %d:%d", target_path, uid, gid)
    file_walker = (
        os.path.join(root, f)
        for root, _, files in os.walk(target_path)
        for f in files
    )
    for f in file_walker:
        if not os.path.isfile(f):
            continue
        try:
            os.chown(f, uid, gid)
        except (OSError, IOError) as e:
            # Non fatal if owner change fails.
            logger.warn("Failed to set time/owner attributes on '%s': %s", f, e)
    return
@app.task
def downloadPlugin(url, plugin=None, zipFile=None):
    """download a plugin, extract and install it

    Either fetches *url* (a ZIP archive or a zeroinstall .xml feed), or
    installs from *zipFile* already sitting in the plugin scratch folder.
    Returns the saved Plugin model on success, False/None on failure.
    """
    if not plugin:
        from iondb.rundb import models
        plugin = models.Plugin.objects.create(name='Unknown', version='Unknown', status={})
    plugin.status["installStatus"] = "downloading"
    #normalise the URL
    url = urlparse.urlsplit(url).geturl()
    if not zipFile:
        if url.endswith(".xml"):
            status = downloadPluginZeroInstall(url, plugin, logger=logger)
            logger.error("xml") # logfile
        else:
            status = downloadPluginArchive(url, plugin, logger=logger)
            logger.error("zip") # logfile
        if not status:
            # FIXME - Errors!
            installStatus = plugin.status.get('installStatus', 'Unknown')
            result = plugin.status.get('result', 'unknown')
            msg = "Plugin install '%s', Result: '%s'" % (installStatus, result)
            logger.error(msg) # logfile
            from iondb.rundb import models
            models.Message.error(msg) # UI message
            return False
    else:
        # Extract zipfile
        scratch_path = os.path.join(settings.PLUGIN_PATH, "scratch")
        zip_file = os.path.join(scratch_path, zipFile)
        plugin.status["installStatus"] = "extracting zip"
        ret = None
        try:
            ret = unzipPlugin(zip_file)
        except Exception:
            logger.exception("Failed to unzip Plugin: '%s'", zip_file)
        finally:
            #remove the zip file
            os.unlink(zip_file)
        if ret is None:
            # BUGFIX: the original fell through here and raised NameError on
            # `ret`; bail out explicitly when extraction failed.
            return None
        plugin.name = ret['plugin']
        plugin.path = ret['path']
        plugin.status["installStatus"] = "installing from zip"
    # Now that it has been downloaded,
    # convert pre-plugin into real db plugin object
    try:
        from iondb.plugins.manager import pluginmanager
        (new_plugin, updated) = pluginmanager.install(plugin.name, plugin.path)
    except ValueError:
        logger.exception("Plugin rejected by installer. Check syntax and content.")
        return None
    # Copy over download status messages and url
    new_plugin.status = plugin.status
    if plugin.url:
        new_plugin.url = plugin.url
    new_plugin.save()
    logger.info("Successfully downloaded and installed plugin %s v%s from '%s'", new_plugin.name, new_plugin.version, url)
    return new_plugin
@app.task
def downloadPublisher(url, zip_file=None):
    """Download (or take a staged ZIP of) a publisher, extract it, validate
    publisher_meta.json, install under /results/publishers/ and rescan.

    Posts a UI Message on success or failure; always cleans up the ZIP and
    scratch folder. Returns False only when the download itself fails.
    """
    from iondb.rundb.models import Message
    #normalise the URL
    if not zip_file:
        url = urlparse.urlsplit(url).geturl()
        ret = downloadChunks(url)
        if not ret:
            return False
        (zip_file, url) = ret
        pub_name = os.path.splitext(os.path.basename(url))[0]
    else:
        pub_name = os.path.splitext(os.path.basename(zip_file))[0]
    # Extract zipfile - yes, plugins scratch folder, not publisher specific.
    scratch_path = os.path.join(settings.PLUGIN_PATH, "scratch", "publisher-temp", pub_name)
    # NOTE: when zip_file is already an absolute path (download case),
    # os.path.join discards the scratch prefix and keeps the absolute path.
    zip_file = os.path.join(settings.PLUGIN_PATH, "scratch", zip_file)
    try:
        (prefix, files) = extract_zip(zip_file, scratch_path, auto_prefix=True, logger=logger)
        if prefix:
            # Good - ZIP has top level folder with publisher name, use that name instead.
            pub_name = os.path.basename(prefix)
            base_path = os.path.join(scratch_path, pub_name)
        else:
            base_path = scratch_path
        # make sure we have a publisher_meta.json file
        if not os.path.exists(os.path.join(base_path, 'publisher_meta.json')):
            raise Exception('Missing publisher_meta.json!')
        # Move from temp folder into publisher
        pub_final_path = os.path.join("/results/publishers/", pub_name)
        if os.path.exists(pub_final_path):
            # existing publisher will be replaced
            delete_that_folder(pub_final_path, "Error removing old copy of publisher at '%s'" % pub_final_path)
        shutil.move(base_path, pub_final_path)
        ## Rescan Publishers to complete install
        from iondb.rundb import publishers
        publishers.search_for_publishers()
        msg = "Successfully downloaded and installed publisher %s from ZIP archive" % (pub_name,)
        logger.info(msg)
        Message.success(msg)
    except Exception as err:
        msg = "Failed to install publisher from %s. ERROR: %s" % (zip_file, err)
        Message.error(msg)
        logger.exception(msg)
    finally:
        #remove the zip file
        os.unlink(zip_file)
        delete_that_folder(scratch_path, "Error deleting temp publisher zip folder at '%s'" % scratch_path)
    return
@app.task
def contact_info_flyaway():
    """This invokes an external on the path which performs 3 important steps:
    Pull contact information from the DB
    Black magic
    Axeda has the contact information
    """
    logger.info("The user updated their contact information.")
    updater = subprocess.Popen(["/opt/ion/RSM/updateContactInfo.py"],
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = updater.communicate()
    if err:
        logger.warning("updateContactInfo.py output error information:\n%s" % err)
    return out
@app.task
def static_ip(address, subnet, gateway):
    """Usage: TSstaticip [options]
    --ip Define host IP address
    --nm Define subnet mask (netmask)
    --nw Define network ID
    --bc Define broadcast IP address
    --gw Define gateway/router IP address
    """
    args = [
        "/usr/sbin/TSstaticip",
        "--ip", address,
        "--nm", subnet,
        "--gw", gateway,
    ]
    logger.info("Network: Setting host static, '%s'" % " ".join(args))
    process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = process.communicate()
    if err:
        logger.warning("Network error: %s" % err)
    return out
@app.task
def dhcp():
    """Usage: TSstaticip [options]
    --remove Sets up dhcp, removing any static IP settings
    """
    args = ["/usr/sbin/TSstaticip", "--remove"]
    logger.info("Network: Setting host DHCP, '%s'" % " ".join(args))
    process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = process.communicate()
    if err:
        logger.warning("Network error: %s" % err)
    return out
@app.task
def proxyconf(address, port, username, password):
    """Usage: TSsetproxy [options]
    --address Proxy address (http://proxy.net)
    --port Proxy port number
    --username Username for authentication
    --password Password for authentication
    --remove Removes proxy setting
    """
    args = [
        "/usr/sbin/TSsetproxy",
        "--address", address,
        "--port", port,
        "--username", username,
        "--password", password,
    ]
    logger.info("Network: Setting proxy settings, '%s'" % " ".join(args))
    process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = process.communicate()
    if err:
        logger.warning("Network error: %s" % err)
    return out
@app.task
def ax_proxy():
    """Usage: TSsetproxy [options]
    --remove Removes proxy setting
    """
    args = ["/usr/sbin/TSsetproxy", "--remove"]
    logger.info("Network: Removing proxy settings, '%s'" % " ".join(args))
    process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = process.communicate()
    if err:
        logger.warning("Network error: %s" % err)
    return out
@app.task
def dnsconf(dns):
    """Usage: TSdns [options]
    --dns Define one or more comma delimited dns servers
    """
    args = ["/usr/sbin/TSdns", "--dns", dns]
    logger.info("Network: Changing DNS settings, '%s'" % " ".join(args))
    process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = process.communicate()
    if err:
        logger.warning("Network error: %s" % err)
    return out
@app.task
def updateOneTouch():
    """Scan for OneTouch instruments and record the result in /tmp/OTstatus.

    Only runs when the /tmp/OTlock lock file exists; the lock is removed
    when the scan completes. Returns True if a scan ran, False otherwise.
    """
    sys.path.append("/opt/ion/onetouch")
    from onetouch import findHosts
    #find onetouches
    if not os.path.exists("/tmp/OTlock"):
        return False
    #remove the OTstatus file if it exists
    if os.path.exists("/tmp/OTstatus"):
        os.unlink("/tmp/OTstatus")
    #touch the status file
    open("/tmp/OTstatus", 'w').close()
    #run the onetouch update script
    try:
        updateStatus = findHosts.findOneTouches()
    except Exception:
        # best effort: record the failure in the status file instead of raising
        updateStatus = "FAILED"
    # `with` guarantees the status file handle is closed
    with open("/tmp/OTstatus", 'w') as otStatus:
        otStatus.write(str(updateStatus) + "\n")
        otStatus.write("DONE\n")
    #now remove the lock
    os.unlink("/tmp/OTlock")
    return True
def make_reference_paths(reference):
    """Create the on-disk folder for *reference* and return its fasta path."""
    short_name = reference.short_name
    reference.reference_path = os.path.join(settings.TMAP_DISABLED_DIR, short_name)
    os.mkdir(reference.reference_path)
    reference.save()
    return os.path.join(reference.reference_path, short_name + ".fasta")
@app.task(queue="slowlane")
def unzip_reference(reference_id, reference_file=None):
    """Extract *reference_file* from the downloaded ZIP into the reference
    genome folder.

    Returns the destination fasta path; deletes the FileMonitor record.
    Re-raises on extraction failure.
    """
    from iondb.rundb import models
    reference = models.ReferenceGenome.objects.get(pk=reference_id)
    reference.status = "preprocessing"
    reference.save()
    zip_path = reference.file_monitor.full_path()
    destination = make_reference_paths(reference)
    try:
        # context managers close both handles even on error (the original
        # leaked them when copyfileobj raised)
        archive = zipfile.ZipFile(zip_path)
        try:
            source_file = archive.open(reference_file, 'rU')
            with open(destination, 'w') as dest_file:
                shutil.copyfileobj(source_file, dest_file)
        finally:
            archive.close()
    except Exception as err:
        # BUGFIX: message used to read "Could extract" on the failure path
        logger.error("Could not extract fasta zipped reference id=%d at %s" % (reference.pk, zip_path))
        raise err
    reference.file_monitor.delete()
    return destination
@app.task(queue="slowlane")
def copy_reference(reference_id):
    """Move an already-downloaded fasta into the reference genome folder."""
    from iondb.rundb import models
    ref = models.ReferenceGenome.objects.get(pk=reference_id)
    ref.status = "preprocessing"
    ref.save()
    source = ref.file_monitor.full_path()
    target = make_reference_paths(ref)
    shutil.move(source, target)
    ref.file_monitor.delete()
    return target
@app.task(queue="slowlane")
def build_tmap_index(reference_id):
    """ Provides a way to kick off the tmap index generation
    this should spawn a process that calls the build_genome_index.pl script
    it may take up to 3 hours.
    The django server should contacts this method from a view function
    When the index creation processes has exited, cleanly or other wise
    a callback will post to a url that will update the record for the library data
    letting the genome manager know that this now exists
    until then this genome will be listed in a unfinished state.

    Returns True when the index build succeeded, False otherwise.
    """
    from iondb.rundb import models
    reference = models.ReferenceGenome.objects.get(pk=reference_id)
    reference.status = "indexing"
    reference.save()
    fasta = os.path.join(reference.reference_path , reference.short_name + ".fasta")
    logger.debug("TMAP %s rebuild, for reference %s(%d) using fasta %s"%
                 (settings.TMAP_VERSION, reference.short_name, reference.pk, fasta))
    cmd = [
        '/usr/local/bin/build_genome_index.pl',
        "--auto-fix",
        "--fasta", fasta,
        "--genome-name-short", reference.short_name,
        "--genome-name-long", reference.name,
        "--genome-version", reference.version
    ]
    # setsid puts the builder in its own process group so it can be terminated
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=settings.TMAP_DIR, preexec_fn=os.setsid)
    def termiante_handler(signal, frame):
        # On SIGTERM, forward termination to the child build (if still
        # running), then exit this task.  (sic: name typo kept as-is.)
        if proc.poll() is None:
            proc.terminate()
        raise SystemExit
    signal.signal(signal.SIGTERM, termiante_handler)
    stdout, stderr = proc.communicate()
    ret = proc.returncode
    if ret == 0:
        logger.debug("Successfully built the TMAP %s index for %s" %
                     (settings.TMAP_VERSION, reference.short_name))
        reference.status = 'complete'
        reference.index_version = settings.TMAP_VERSION
        reference.save()
        reference.enabled = True
        reference.enable_genome()
    else:
        logger.error('TMAP index rebuild "%s" failed:\n%s' %
                     (" ".join(cmd), stderr))
        reference.status = 'error'
        reference.verbose_error = json.dumps((stdout, stderr, ret))
        reference.save()
        reference.enabled = False
        reference.disable_genome()
    return ret == 0
def IonReporterWorkflows(autorun=True):
    """Fetch the workflow list from an IonReporterUploader V1.0 server.

    Returns (True, workflows) on success, (False, error_message) on failure.
    """
    try:
        from iondb.rundb import models
        if autorun:
            IonReporterUploader= models.Plugin.objects.get(name="IonReporterUploader_V1_0",selected=True,active=True,autorun=True)
        else:
            IonReporterUploader= models.Plugin.objects.get(name="IonReporterUploader_V1_0",selected=True,active=True)
        logging.error(IonReporterUploader.config)
        config = IonReporterUploader.config
    except models.Plugin.DoesNotExist:
        error = "IonReporterUploader V1.0 Plugin Not Found."
        logging.error(error)
        return False, error
    try:
        headers = {"Authorization" : config["token"] }
        url = config["protocol"] + "://" + config["server"] + ":" + config["port"] +"/grws/analysis/wflist"
        logging.info(url)
    except KeyError:
        error = "IonReporterUploader V1.0 Plugin Config is missing needed data."
        logging.exception(error)
        return False, error
    try:
        #using urllib2 right now because it does NOT verify SSL certs
        req = urllib2.Request(url = url, headers = headers)
        response = urllib2.urlopen(req)
        content = response.read()
        content = json.loads(content)
        workflows = content["workflows"]
        return True, workflows
    except urllib2.HTTPError, e:
        error = "IonReporterUploader V1.0 could not contact the server."
        content = e.read()
        logging.error("Error: %s\n%s", error, content)
        return False, error
    except:
        error = "IonReporterUploader V1.0 could not contact the server."
        logging.exception(error)
        return False, error
def IonReporterVersion(plugin):
    """
    This is a temp thing for 3.0. We need a way for IRU to get the versions
    this will do that for us.

    Returns (True, versions) on success, (False, error_message) on failure.
    """
    #if version is pased in use that plugin name instead
    if not plugin:
        plugin = "IonReporterUploader"
    try:
        from iondb.rundb import models
        IonReporterUploader = models.Plugin.objects.get(name=plugin, selected=True, active=True)
        logging.error(IonReporterUploader.config)
        config = IonReporterUploader.config
    except models.Plugin.DoesNotExist:
        error = plugin + " Plugin Not Found."
        logging.exception(error)
        return False, error
    try:
        headers = {"Authorization": config["token"]}
        url = config["protocol"] + "://" + config["server"] + ":" + config["port"] + "/grws_1_2/data/versionList"
        logging.info(url)
    except KeyError:
        error = plugin + " Plugin Config is missing needed data."
        # BUGFIX: config is a dict -- string concatenation raised TypeError here
        logging.debug("%s config: %s", plugin, config)
        logging.exception(error)
        return False, error
    try:
        #using urllib2 right now because it does NOT verify SSL certs
        req = urllib2.Request(url=url, headers=headers)
        response = urllib2.urlopen(req)
        content = json.loads(response.read())
        versions = content["Version List"]
        return True, versions
    except urllib2.HTTPError as e:
        error = plugin + " could not contact the server. No versions will be returned"
        logging.error("Error: %s\n%s", error, e.read())
        # BUGFIX: the original fell through here and implicitly returned None,
        # breaking callers that unpack (ok, result)
        return False, error
    except Exception:
        error = plugin + " could not contact the server. No versions will be returned"
        logging.exception(error)
        return False, error
@periodic_task(run_every=timedelta(days=1), expires=600, queue="periodic")
def scheduled_update_check():
    """Daily check for Torrent Server package updates.

    Posts (or clears) the "new-upgrade" UI message and, when auto-download
    is enabled, kicks off the package download task.
    Re-raises on failure after recording the error in GlobalConfig.
    """
    from iondb.rundb import models
    try:
        packages = check_updates()
        upgrade_message = models.Message.objects.filter(tags__contains="new-upgrade")
        if packages:
            if not upgrade_message.all():
                models.Message.info('There is an update available for your Torrent Server. <a class="btn btn-success" href="/admin/update">Update Now</a>', tags='new-upgrade', route=models.Message.USER_STAFF)
            download_now = models.GlobalConfig.get().enable_auto_pkg_dl
            if download_now:
                # renamed from `async` -- a reserved word in Python 3.7+
                async_result = download_updates.delay()
                logger.debug("Auto starting download of %d packages in task %s" % (len(packages), async_result.task_id))
        else:
            upgrade_message.delete()
    except Exception as err:
        logger.error("TSconfig raised '%s' during a scheduled update check." % err)
        models.GlobalConfig.objects.update(ts_update_status="Update failure")
        raise
@app.task
def check_updates():
    """Currently this is passed a TSConfig object; however, there might be a
    smoother design for this control flow.

    Polls TSconfig for updatable packages and returns them; on failure,
    records the error in GlobalConfig and re-raises.
    """
    from iondb.rundb import models
    try:
        import ion_tsconfig.TSconfig
        tsconfig = ion_tsconfig.TSconfig.TSconfig()
        security_enabled = models.GlobalConfig.get().enable_auto_security
        tsconfig.set_securityinstall(security_enabled)
        packages = tsconfig.TSpoll_pkgs()
    except Exception as err:
        logger.error("TSConfig raised '%s' during update check." % err)
        models.GlobalConfig.objects.update(ts_update_status="Update failure")
        raise
    return packages
@app.task
def download_updates(auto_install=False):
    """Download available package updates via TSconfig.

    When *auto_install* is set and packages were downloaded, upgrades
    ion-tsconfig first and then starts install_updates asynchronously.
    Returns (downloaded_packages, async_result_or_None); re-raises on
    download failure after recording it in GlobalConfig.
    """
    from iondb.rundb import models
    try:
        import ion_tsconfig.TSconfig
        tsconfig = ion_tsconfig.TSconfig.TSconfig()
        enable_security_update = models.GlobalConfig.get().enable_auto_security
        tsconfig.set_securityinstall(enable_security_update)
        downloaded = tsconfig.TSexec_download()
    except Exception as err:
        logger.error("TSConfig raised '%s' during a download" % err)
        models.GlobalConfig.objects.update(ts_update_status="Download failure")
        raise
    logger.debug("Finished downloading %d packages" % len(downloaded))
    # renamed from `async` -- a reserved word in Python 3.7+
    async_result = None
    if downloaded and auto_install:
        new_tsconfig = tsconfig.TSexec_update_tsconfig()
        if new_tsconfig:
            logger.debug("Installed ion-tsconfig package")
        async_result = install_updates.delay()
        logger.debug("Auto starting install of %d packages in task %s" % (len(downloaded), async_result.task_id))
    return downloaded, async_result