forked from djaodjin/drop
-
Notifications
You must be signed in to change notification settings - Fork 0
/
__init__.py
5978 lines (5454 loc) · 239 KB
/
__init__.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/env python
#
# Copyright (c) 2015, DjaoDjin inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Implements workspace management.
The workspace manager script is used to setup a local machine
with third-party prerequisites and source code under revision
control such that it is possible to execute a development cycle
(edit/build/run) on a local machine.
The script will email build reports when the --mailto command line option
is specified. There are no sensible default values for the following
variables thus those should be set in the shell environment before
invoking the script.
dwsEmail=
smtpHost=
smtpPort=
dwsSmtpLogin=
dwsSmtpPasswd=
"""
# Primary Author(s): Sebastien Mirolo <smirolo@fortylines.com>
#
# Requires Python 2.7 or above.
__version__ = None
import datetime, hashlib, inspect, json, logging, logging.config, re, optparse
import os, shutil, socket, stat, subprocess, sys, tempfile, urllib2, urlparse
import xml.dom.minidom, xml.sax
import cStringIO
# \todo executable used to return a password compatible with sudo. This is used
# temporarly while sudo implementation is broken when invoked with no tty.
ASK_PASS = ''
# filename for context configuration
CONTEXT_FILENAME = None
# Extensions for more complex prerequisite setup
CUSTOM_STEPS = None
# When True, all commands invoked through shell_command() are printed
# but not executed.
DO_NOT_EXECUTE = False
# Global variables that contain all encountered errors.
ERRORS = []
# When processing a project dependency index file, all project names matching
# one of the *EXCLUDE_PATS* will be considered non-existant.
EXCLUDE_PATS = []
# Log commands output
LOGGER = None
# NOTE(review): presumably an in-memory buffer for log records while
# buffering is active, with a nesting count below — confirm against the
# logging helpers (not shown in this chunk).
LOGGER_BUFFER = None
LOGGER_BUFFERING_COUNT = 0
# Pattern used to search for logs to report through email.
LOG_PAT = None
# When True, the log object is not used and output is only
# done on sys.stdout.
NO_LOG = False
# Address to email log reports to.
MAILTO = []
# When True, *find_lib* will prefer static libraries over dynamic ones if both
# exist for a specific libname. This should match .LIBPATTERNS in prefix.mk.
STATIC_LIB_FIRST = True
# When True, the script runs in batch mode and assumes the default answer
# for every question where it would have prompted the user for an answer.
USE_DEFAULT_ANSWER = False
# Directories where things get installed
INSTALL_DIRS = ['bin', 'include', 'lib', 'libexec', 'etc', 'share']
# distributions per native package managers
APT_DISTRIBS = ['Debian', 'Ubuntu']
YUM_DISTRIBS = ['Fedora', 'CentOS']
PORT_DISTRIBS = ['Darwin']
# Real uid and gid when the -u,--user and/or -g,--group command
# line arguments are used.
USER = None
GROUP = None
# Workspace *Context* singleton (see class Context below); *search_path*
# reads this global. Initialized elsewhere in the file.
CONTEXT = None
# Project index singleton (see class IndexProjects below); initialized
# elsewhere in the file.
INDEX = None
class Error(RuntimeError):
    '''This type of exception is used to identify "expected"
    error condition and will lead to a useful message.
    Other exceptions are not caught when *__main__* executes,
    and an internal stack trace will be displayed. Exceptions
    which are not *Error*s are considered bugs in the workspace
    management script.'''

    def __init__(self, msg='unknown error', code=1, project_name=None):
        '''*msg* is a human readable description of the error, *code* the
        process exit code associated with it, and *project_name* the name
        of the project being processed when the error occurred, if known.
        (Fixed: default message previously read "unknow error".)'''
        RuntimeError.__init__(self)
        self.code = code
        self.msg = msg
        self.project_name = project_name

    def __str__(self):
        # Two formats: "project:code: error msg" when a project is known,
        # otherwise "error: msg (error code)". Both end with a newline.
        if self.project_name:
            return ':'.join([self.project_name, str(self.code), ' error']) \
                + ' ' + self.msg + '\n'
        return 'error: ' + self.msg + ' (error ' + str(self.code) + ')\n'
class CircleError(Error):
    '''Raised when a cycle is detected during a topological
    traversal of the dependency graph.'''

    def __init__(self, connected):
        # *connected* is the set of vertices participating in the cycle.
        joined = ' '.join(connected)
        super(CircleError, self).__init__(
            msg="detected a circle within %s" % joined)
class MissingError(Error):
    '''This error is thrown whenever a project has missing prerequisites.
    (Fixed: the message previously misspelled "prerequisistes".)'''

    def __init__(self, project_name, prerequisites):
        # *prerequisites* is a list of missing prerequisite names;
        # exit code 2 distinguishes missing prerequisites from
        # generic errors (code 1).
        Error.__init__(self, 'The following prerequisites are missing: '
            + ' '.join(prerequisites), 2, project_name)
class Context(object):
    '''The workspace configuration file contains environment variables used
    to update, build and package projects. The environment variables are roots
    of the general dependency graph as most other routines depend on srcTop
    and buildTop at the least.'''

    # Filename of the workspace configuration file (searched upward
    # from the current directory by *locate*).
    config_name = 'dws.mk'
    # Filename of the project dependencies index file.
    indexName = 'dws.xml'

    def __init__(self):
        # Two following variables are used by interactively change the make
        # command-line.
        self.nonative = False
        self.tunnel_point = None
        self.targets = []
        self.overrides = []
        site_top = Pathname('siteTop',
            {'description':
             'Root of the tree where the website is generated\n'\
             ' and thus where *remoteSiteTop* is cached\n'\
             ' on the local system',
             'default':os.getcwd()})
        remote_site_top = Pathname('remoteSiteTop',
            {'description':
             'Root of the remote tree that holds the published website\n'
             ' (ex: url:/var/cache).',
             'default':''})
        install_top = Pathname('installTop',
            {'description':'Root of the tree for installed bin/,'\
             ' include/, lib/, ...',
             'base':'siteTop', 'default':''})
        # We use installTop (previously siteTop), such that a command like
        # "dws build *remoteIndex* *siteTop*" run from a local build
        # directory creates intermediate and installed files there while
        # checking out the sources under siteTop.
        # It might just be my preference...
        build_top = Pathname('buildTop',
            {'description':'Root of the tree where intermediate'\
             ' files are created.',
             'base':'siteTop', 'default':'build'})
        src_top = Pathname('srcTop',
            {'description':
             'Root of the tree where the source code under revision\n'
             ' control lives on the local machine.',
             'base': 'siteTop',
             'default':'reps'})
        # *environ* maps variable names to Variable/Pathname instances
        # (or plain strings once overridden by *load_context*).
        self.environ = {'buildTop': build_top,
            'srcTop' : src_top,
            'patchTop': Pathname('patchTop',
                {'description':'Root of the tree where patches are stored',
                 'base':'siteTop',
                 'default':'patch'}),
            'binDir': Pathname('binDir',
                {'description':'Root of the tree where executables are installed',
                 'base':'installTop'}),
            'installTop': install_top,
            'includeDir': Pathname('includeDir',
                {'description':'Root of the tree where include files are installed',
                 'base':'installTop'}),
            'libDir': Pathname('libDir',
                {'description':'Root of the tree where libraries are installed',
                 'base':'installTop'}),
            'libexecDir': Pathname('libexecDir',
                {'description':'Root of the tree where executable helpers'\
                 ' are installed',
                 'base':'installTop'}),
            'etcDir': Pathname('etcDir',
                {'description':
                 'Root of the tree where configuration files for the local\n'
                 ' system are installed',
                 'base':'installTop'}),
            'shareDir': Pathname('shareDir',
                {'description':'Directory where the shared files are installed.',
                 'base':'installTop'}),
            'siteTop': site_top,
            'logDir': Pathname('logDir',
                {'description':'Directory where the generated log files are'\
                 ' created',
                 'base':'siteTop',
                 'default':'log'}),
            'indexFile': Pathname('indexFile',
                {'description':'Index file with projects dependencies information',
                 'base':'siteTop',
                 'default':os.path.join('resources',
                     os.path.basename(sys.argv[0]) + '.xml')}),
            'remoteSiteTop': remote_site_top,
            'remoteSrcTop': Pathname('remoteSrcTop',
                {'description':
                 'Root of the tree on the remote machine where repositories\n'\
                 ' are located.',
                 'base':'remoteSiteTop',
                 'default':'reps'}),
            'remoteIndex': Pathname('remoteIndex',
                {'description':
                 'Url to the remote index file with projects dependencies\n'\
                 ' information',
                 'base':'remoteSiteTop',
                 'default':'reps/dws.git/dws.xml'}),
            'darwinTargetVolume': Single('darwinTargetVolume',
                {'description':
                 'Destination of installed packages on a Darwin local\n'\
                 ' machine. Installing on the "LocalSystem" requires\n'\
                 ' administrator privileges.',
                 'choices': {'LocalSystem':
                     'install packages on the system root for all users',
                     'CurrentUserHomeDirectory':
                     'install packages for the current user only'}}),
            'distHost': HostPlatform('distHost'),
            'smtpHost': Variable('smtpHost',
                {'description':'Hostname for the SMTP server through'\
                 ' which logs are sent.',
                 'default':'localhost'}),
            'smtpPort': Variable('smtpPort',
                {'description':'Port for the SMTP server through'\
                 ' which logs are sent.',
                 'default':'5870'}),
            'dwsSmtpLogin': Variable('dwsSmtpLogin',
                {'description':
                 'Login on the SMTP server for the user through which\n'\
                 ' logs are sent.'}),
            'dwsSmtpPasswd': Variable('dwsSmtpPasswd',
                {'description':
                 'Password on the SMTP server for the user through which\n'\
                 ' logs are sent.'}),
            'dwsEmail': Variable('dwsEmail',
                {'description':
                 'dws occasionally emails build reports (see --mailto\n'
                 ' command line option). This is the address that will\n'\
                 ' be shown in the *From* field.',
                 'default':os.environ['LOGNAME'] + '@localhost'})}
        # Path of the current directory relative to buildTop, set by *locate*.
        self.build_top_relative_cwd = None
        # Absolute path to the dws.mk configuration file, set by *locate*.
        self.config_filename = None

    def __getattr__(self, name):
        '''Shortcut so that *context.foo* resolves *context.value('foo')*.'''
        return self.value(name)

    def base(self, name):
        '''Returns a basename of the uri/path specified in variable *name*.
        We do not use os.path.basename directly because it wasn't designed
        to handle uri nor does urlparse was designed to handle git/ssh locators.
        '''
        locator = self.value(name)
        # git/ssh style locator: user@host:path
        look = re.match(r'\S+@\S+:(\S+)', locator)
        if look:
            return os.path.splitext(os.path.basename(look.group(1)))[0]
        # http(s) url
        look = re.match(r'https?:(\S+)', locator)
        if look:
            uri = urlparse.urlparse(locator)
            return os.path.splitext(os.path.basename(uri.path))[0]
        # plain filesystem path
        return os.path.splitext(os.path.basename(locator))[0]

    def bin_build_dir(self):
        '''Returns the bin/ directory located inside buildTop.'''
        return os.path.join(self.value('buildTop'), 'bin')

    @staticmethod
    def derived_helper(name):
        '''Absolute path to a file which is part of drop helper files
        located in the share/dws subdirectory. The absolute directory
        name to share/dws is derived from the path of the script
        being executed as such: dirname(sys.argv[0])/../share/dws.'''
        return os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))),
            'share', 'dws', name)
        # That code does not work when we are doing dws make (no recurse).
        # return os.path.join(self.value('buildTop'),'share','dws',name)

    def log_path(self, name):
        '''Absolute path to a file in the local system log
        directory hierarchy.'''
        return os.path.join(self.value('logDir'), name)

    def remote_src_path(self, name):
        '''Absolute path to access a repository on the remote machine.'''
        return os.path.join(self.value('remoteSrcTop'), name)

    def remote_host(self):
        '''Returns the host pointed by *remoteSiteTop*'''
        uri = urlparse.urlparse(self.value('remoteSiteTop'))
        hostname = uri.netloc
        if not uri.netloc:
            # If there is no protocol specified, the hostname
            # will be in uri.scheme (That seems like a bug in urlparse).
            hostname = uri.scheme
        return hostname

    def cwd_project(self):
        '''Returns a project name derived out of the current directory.'''
        if not self.build_top_relative_cwd:
            # No configuration file was found by *locate*; create one.
            self.environ['buildTop'].default = os.path.dirname(os.getcwd())
            log_info('no workspace configuration file could be ' \
                + 'found from ' + os.getcwd() \
                + ' all the way up to /. A new one, called ' + self.config_name\
                + ', will be created in *buildTop* after that path is set.')
            self.config_filename = os.path.join(self.value('buildTop'),
                self.config_name)
            self.save()
            self.locate()
        # The project name is the current directory path relative to
        # whichever of buildTop/srcTop encloses the current directory.
        if os.path.realpath(os.getcwd()).startswith(
                os.path.realpath(self.value('buildTop'))):
            top = os.path.realpath(self.value('buildTop'))
        elif os.path.realpath(os.getcwd()).startswith(
                os.path.realpath(self.value('srcTop'))):
            top = os.path.realpath(self.value('srcTop'))
        else:
            raise Error("You must run dws from within a subdirectory of "\
                "buildTop or srcTop")
        prefix = os.path.commonprefix([top, os.getcwd()])
        return os.getcwd()[len(prefix) + 1:]

    def db_pathname(self):
        '''Absolute pathname to the project index file.'''
        if not str(self.environ['indexFile']):
            # Derive a local default from *remoteIndex* when unset.
            filtered = filter_rep_ext(self.value('remoteIndex'))
            if filtered != self.value('remoteIndex'):
                prefix = self.value('remoteSrcTop')
                if not prefix.endswith(':') and not prefix.endswith(os.sep):
                    prefix = prefix + os.sep
                self.environ['indexFile'].default = \
                    self.src_dir(os.path.normpath(filtered).replace(prefix, ''))
            else:
                self.environ['indexFile'].default = \
                    self.local_dir(self.value('remoteIndex'))
        return self.value('indexFile')

    def host(self):
        '''Returns the distribution of the local system
        on which the script is running.'''
        return self.value('distHost')

    def local_dir(self, name):
        '''Returns the path on the local system to a directory.'''
        site_top = self.value('siteTop')
        pos = name.rfind('./')
        if pos >= 0:
            # Everything after a './' marker is relative to siteTop.
            localname = os.path.join(site_top, name[pos + 2:])
        elif (str(self.environ['remoteSiteTop'])
              and name.startswith(self.value('remoteSiteTop'))):
            # Mirror the remote hierarchy under siteTop.
            localname = filter_rep_ext(name)
            remote_site_top = self.value('remoteSiteTop')
            if remote_site_top.endswith(':'):
                site_top = site_top + '/'
            localname = localname.replace(remote_site_top, site_top)
        elif ':' in name:
            # Remote locator outside remoteSiteTop: cache under resources/.
            localname = os.path.join(
                site_top, 'resources', os.path.basename(name))
        elif not name.startswith(os.sep):
            localname = os.path.join(site_top, name)
        else:
            localname = name.replace(
                self.value('remoteSiteTop'), site_top)
        return localname

    def remote_dir(self, name):
        '''Returns the absolute path on the remote system that corresponds
        to *name*, the absolute path of a file or directory on the local
        system.'''
        if name.startswith(self.value('siteTop')):
            return name.replace(self.value('siteTop'),
                self.value('remoteSiteTop'))
        return None

    def load_context(self, filename):
        '''Read "name = value" lines from *filename* into *environ*.
        Returns True when a *siteTop* assignment was found.'''
        site_top_found = False
        with open(filename) as config_file:
            line = config_file.readline()
            while line != '':
                look = re.match(r'(\S+)\s*=\s*(\S+)', line)
                if look != None:
                    if look.group(1) == 'siteTop':
                        site_top_found = True
                    if (look.group(1) in self.environ
                        and isinstance(self.environ[look.group(1)], Variable)):
                        self.environ[look.group(1)].value = look.group(2)
                    else:
                        # Unknown names are stored as plain strings.
                        self.environ[look.group(1)] = look.group(2)
                line = config_file.readline()
        return site_top_found

    def locate(self, config_filename=None):
        '''Locate the workspace configuration file and derive the project
        name out of its location.'''
        try:
            if config_filename:
                self.config_filename = config_filename
                self.config_name = os.path.basename(config_filename)
                self.build_top_relative_cwd = os.path.dirname(config_filename)
                self.load_context(self.config_filename)
            else:
                self.build_top_relative_cwd, self.config_filename \
                    = search_back_to_root(self.config_name)
        except IOError:
            # No configuration file exists yet; pick where to create it.
            self.build_top_relative_cwd = None
            self.environ['buildTop'].configure(self)
            build_top = str(self.environ['buildTop'])
            site_top = str(self.environ['siteTop'])
            if build_top.startswith(site_top):
                # When build_top is inside the site_top, we create the config
                # file in site_top for convinience so dws commands can be run
                # anywhere from within site_top (i.e. both build_top
                # and src_top).
                self.config_filename = os.path.join(site_top,
                    self.config_name)
            else:
                # When we have a split hierarchy we can build the same src_top
                # multiple different ways but dws commands should exclusively
                # be run from within the build_top.
                self.config_filename = os.path.join(build_top,
                    self.config_name)
            if not os.path.isfile(self.config_filename):
                self.save()
        if self.build_top_relative_cwd == '.':
            self.build_top_relative_cwd = os.path.basename(os.getcwd())
            # \todo is this code still relevent?
            look = re.match('([^-]+)-.*', self.build_top_relative_cwd)
            if look:
                # Change of project name in *indexName* on "make dist-src".
                # self.build_top_relative_cwd = look.group(1)
                pass
        # -- Read the environment variables set in the config file.
        home_dir = os.environ['HOME']
        if 'SUDO_USER' in os.environ:
            # Under sudo, look into the invoking user's home, not root's.
            home_dir = home_dir.replace(os.environ['SUDO_USER'],
                os.environ['LOGNAME'])
        user_default_config = os.path.join(home_dir, '.dws')
        if os.path.exists(user_default_config):
            self.load_context(user_default_config)
        site_top_found = self.load_context(self.config_filename)
        if not site_top_found and not self.environ['siteTop'].value:
            # By default we set *siteTop* to be the directory
            # where the configuration file was found since basic paths
            # such as *buildTop* and *srcTop* defaults are based on it.
            self.environ['siteTop'].value = os.path.dirname(
                self.config_filename)

    def logname(self):
        '''Name of the XML tagged log file where sys.stdout is captured.'''
        filename = os.path.basename(self.config_name)
        filename = os.path.splitext(filename)[0] + '.log'
        filename = self.log_path(filename)
        if not os.path.exists(os.path.dirname(filename)):
            os.makedirs(os.path.dirname(filename))
        return filename

    def logbuildname(self):
        '''Name of the log file for build summary.'''
        filename = os.path.basename(self.config_name)
        filename = os.path.splitext(filename)[0] + '-build.log'
        filename = self.log_path(filename)
        if not os.path.exists(os.path.dirname(filename)):
            os.makedirs(os.path.dirname(filename))
        return filename

    def obj_dir(self, name):
        '''Absolute path to the intermediate build directory for *name*.'''
        return os.path.join(self.value('buildTop'), name)

    def patch_dir(self, name):
        '''Absolute path to the patch directory for *name*.'''
        return os.path.join(self.value('patchTop'), name)

    def from_remote_index(self, remote_path, nonative=False):
        '''We need to set the *remoteIndex* to a realpath when we are dealing
        with a local file else links could end-up generating a different prefix
        than *remoteSiteTop* for *remoteIndex*/*indexName*.'''
        self.nonative = nonative
        if search_repo_pat(remote_path):
            remote_path = os.path.join(remote_path, self.indexName)
        # Set remoteIndex.value instead of remoteIndex.default because
        # we don't want to trigger a configure of logDir before we have
        # a chance to set the siteTop.
        look = re.match(r'(\S+@)?(\S+):(.*)', remote_path)
        if look:
            # ssh-style locator: [user@]host:path
            self.tunnel_point = look.group(2)
            src_base = look.group(3)
            site_base = src_base
            remote_path_list = look.group(3).split(os.sep)
            host_prefix = self.tunnel_point + ':'
            if look.group(1):
                host_prefix = look.group(1) + host_prefix
        else:
            # We compute *base* here through the same algorithm as done
            # in *local_dir*. We do not call *local_dir* because remoteSiteTop
            # is not yet defined at this point.
            src_base = os.path.dirname(remote_path)
            while not os.path.isdir(src_base):
                src_base = os.path.dirname(src_base)
            remote_path_list = remote_path.split(os.sep)
            site_base = os.path.dirname(src_base)
            host_prefix = ''
        for i in range(0, len(remote_path_list)):
            if remote_path_list[i] == '.':
                # A './' component marks the siteTop/srcTop boundary.
                site_base = os.sep.join(remote_path_list[0:i])
                src_base = os.path.join(site_base, remote_path_list[i + 1])
                break
            look = search_repo_pat(remote_path_list[i])
            if look:
                # splitext does not return any extensions when the path
                # starts with dot.
                rep_ext = look.group(1)
                if not rep_ext.startswith('.'):
                    _, rep_ext = os.path.splitext(look.group(1))
                if remote_path_list[i] == rep_ext:
                    i = i - 1
                if i > 2:
                    src_base = os.sep.join(remote_path_list[0:i])
                    site_base = os.sep.join(remote_path_list[0:i-1])
                elif i > 1:
                    src_base = remote_path_list[0]
                    site_base = ''
                else:
                    src_base = ''
                    site_base = ''
                break
        if not self.tunnel_point:
            # We can't use realpath before we figured out where the '.'
            # delimiter is in remote_path.
            if len(src_base) > 0:
                remote_path = os.path.normpath(remote_path).replace(
                    src_base, os.path.realpath(src_base))
                src_base = os.path.realpath(src_base)
            else:
                remote_path = os.path.normpath(
                    os.path.join(os.getcwd(), remote_path))
                src_base = os.getcwd()
            if len(site_base) > 0:
                site_base = os.path.realpath(site_base)
            else:
                site_base = os.getcwd()
        self.environ['remoteIndex'].value = remote_path
        self.environ['remoteSrcTop'].default = host_prefix + src_base
        # Note: We used to set the context[].default field which had for side
        # effect to print the value the first time the variable was used.
        # The problem is that we need to make sure remoteSiteTop is defined
        # before calling *local_dir*, otherwise the resulting indexFile value
        # will be different from the place the remoteIndex is fetched to.
        self.environ['remoteSiteTop'].value = host_prefix + site_base

    def save(self):
        '''Write the config back to a file.'''
        if not self.config_filename:
            # No config_filename means we are still figuring out siteTop,
            # so we don't know where to store the config file.
            return
        if not os.path.exists(os.path.dirname(self.config_filename)):
            os.makedirs(os.path.dirname(self.config_filename))
        config_file = open(self.config_filename, 'w')
        keys = sorted(self.environ.keys())
        config_file.write('# configuration for development workspace\n\n')
        for key in keys:
            val = self.environ[key]
            if len(str(val)) > 0:
                config_file.write(key + '=' + str(val) + '\n')
        config_file.close()

    def search_path(self, name, variant=None):
        '''Derives a list of directory names based on the PATH
        environment variable, *name* and a *variant* triplet.'''
        candidates = []
        # We want the actual value of *name*Dir and not one derived from binDir
        # NOTE(review): reads the global CONTEXT rather than self — confirm
        # this is intentional.
        dirname = CONTEXT.value(name + 'Dir')
        if os.path.isdir(dirname):
            # First look into variant and finally in no variant directory
            # because even though libraries are often in variant subdirectories,
            # executables often are not.
            if variant:
                for subdir in os.listdir(dirname):
                    if re.match(variant, subdir):
                        candidates += [os.path.join(dirname, subdir)]
            candidates += [dirname]
        candidates += os.environ['PATH'].split(':')
        dirs = []
        for path in candidates:
            base = os.path.dirname(path)
            if name == 'lib':
                # On mixed 32/64-bit system, libraries also get installed
                # in lib64/. This is also true for 64-bit native python modules.
                for subpath in [name, 'lib64']:
                    dirname = os.path.join(base, subpath)
                    if os.path.isdir(dirname):
                        if variant:
                            for subdir in os.listdir(dirname):
                                if re.match(variant, subdir):
                                    dirs += [os.path.join(dirname, subdir)]
                        else:
                            dirs += [dirname]
            elif name == 'bin':
                # Especially on Fedora, /sbin, /usr/sbin, etc. are many times
                # not in the PATH.
                if os.path.isdir(path):
                    dirs += [path]
                sbin = os.path.join(base, 'sbin')
                if (not sbin in os.environ['PATH'].split(':')
                    and os.path.isdir(sbin)):
                    dirs += [sbin]
            else:
                if os.path.isdir(os.path.join(base, name)):
                    dirs += [os.path.join(base, name)]
        if name == 'lib' and self.host() in PORT_DISTRIBS:
            # Just because python modules do not get installed
            # in /opt/local/lib/python2.7/site-packages
            dirs += ['/opt/local/Library/Frameworks']
        if name == 'share' and self.host() in APT_DISTRIBS:
            dirs += ['/var/lib/gems']
        return dirs

    def src_dir(self, name):
        '''Absolute path to the source checkout directory for *name*.'''
        return os.path.join(self.value('srcTop'), name)

    def value(self, name):
        '''returns the value of the workspace variable *name*. If the variable
        has no value yet, a prompt is displayed for it.'''
        if not name in self.environ:
            raise Error("Trying to read unknown variable " + name + ".")
        if (isinstance(self.environ[name], Variable)
            and self.environ[name].configure(self)):
            self.save()
        # recursively resolve any variables that might appear
        # in the variable value. We do this here and not while loading
        # the context because those names can have been defined later.
        value = str(self.environ[name])
        look = re.match(r'(.*)\${(\S+)}(.*)', value)
        while look:
            indirect = ''
            if look.group(2) in self.environ:
                indirect = self.value(look.group(2))
            elif look.group(2) in os.environ:
                indirect = os.environ[look.group(2)]
            value = look.group(1) + indirect + look.group(3)
            look = re.match(r'(.*)\${(\S+)}(.*)', value)
        return value
# Formats help for script commands. The necessity for this class
# can be understood by the following posts on the internet:
# - http://groups.google.com/group/comp.lang.python/browse_thread/thread/6df6e
# - http://www.alexonlinux.com/pythons-optparse-for-human-beings
#
# \todo The argparse (http://code.google.com/p/argparse/) might be part
# of the standard python library and address the issue at some point.
class CommandsFormatter(optparse.IndentedHelpFormatter):
    '''Help formatter that preserves the line structure of the epilog
    while wrapping each individual line to the available width.'''

    def format_epilog(self, description):
        '''Wrap each line of *description* separately and return the
        resulting text terminated by a newline; an empty description
        yields an empty string.'''
        import textwrap
        if not description:
            return ""
        wrap_width = self.width - self.current_indent
        wrapped = []
        for paragraph in description.split('\n'):
            wrapped.append(textwrap.fill(
                paragraph,
                wrap_width,
                initial_indent="",
                subsequent_indent=" "))
        return "\n".join(wrapped) + "\n"
class IndexProjects(object):
    '''Index file containing the graph dependency for all projects.'''

    def __init__(self, context, source=None):
        self.context = context
        self.parser = XMLDbParser(context)
        # XXX testing: self.parser = YAMLikeParser(context)
        self.source = source

    def closure(self, dgen):
        '''Find out all dependencies from a root set of projects as defined
        by the dependency generator *dgen*.'''
        while dgen.more():
            self.parse(dgen)
        return dgen.topological()

    def parse(self, dgen):
        '''Parse the project index and generates callbacks to *dgen*'''
        self.validate()
        self.parser.parse(self.source, dgen)

    def validate(self, force=False):
        '''Create the project index file if it does not exist
        either by fetching it from a remote server or collecting
        projects indices locally.'''
        if not self.source:
            self.source = self.context.db_pathname()
        if self.source.startswith('<?xml'):
            # The source is an actual string, thus we do not fetch any file.
            return
        if os.path.exists(self.source) and not force:
            return
        choice = ''
        if not force:
            # Ask whether to regenerate the index or fetch a copy.
            choice = select_one(
                'The project index file could not '
                + 'be found at "' + self.source \
                + '". It can be regenerated through one ' \
                + 'of the two following method:',
                [['fetching', 'from remote server'],
                 ['indexing', 'local projects in the workspace']],
                False)
        if choice == 'indexing':
            pub_collect([])
        elif choice == 'fetching' or force:
            vcs = Repository.associate(self.context.value('remoteIndex'))
            # XXX Does not matter here for rsync.
            # What about other repos?
            vcs.update(None, self.context)
        if not os.path.exists(self.source):
            raise Error(self.source + ' does not exist.')
class PdbHandler(object):
    '''Callback interface for a project index as generated by an *xmlDbParser*.
    This base handler deliberately does nothing; subclasses override the
    callbacks they care about and ignore the rest.'''

    def __init__(self):
        '''The base handler carries no state.'''

    def end_parse(self):
        '''Invoked once after the whole index has been parsed; no-op here.'''

    def project(self, proj):
        '''Invoked for each project definition encountered; no-op here.'''
class Unserializer(PdbHandler):
    '''Builds *Project* instances for every project that matches *include_pats*
    and not *exclude_pats*. See *filters*() for implementation.'''

    def __init__(self, include_pats=None, exclude_pats=None, custom_steps=None):
        PdbHandler.__init__(self)
        self.projects = {}
        self.first_project = None
        # Fixed: always initialize *include_pats*. Previously the attribute
        # was left unset when *include_pats* was None or empty, which made
        # *filters*() raise AttributeError later on.
        if include_pats:
            self.include_pats = set(include_pats)
        else:
            self.include_pats = set([])
        # Project which either fullfil all prerequisites or that have been
        # explicitely excluded from installation by the user will be added
        # to *exclude_pats*.
        if exclude_pats:
            self.exclude_pats = set(exclude_pats)
        else:
            self.exclude_pats = set([])
        if custom_steps:
            self.custom_steps = dict(custom_steps)
        else:
            self.custom_steps = {}

    def as_project(self, name):
        '''Returns the *Project* instance for *name*, raising *Error*
        when the project was never seen by this unserializer.'''
        if not name in self.projects:
            raise Error("unable to find " + name + " in the index file.",
                project_name=name)
        return self.projects[name]

    def filters(self, project_name):
        '''Returns True when *project_name* matches one of *include_pats*
        and none of *exclude_pats*. Literal '+' characters in patterns are
        escaped before being used as regular expressions.'''
        for inc in self.include_pats:
            inc = inc.replace('+', '\\+')
            if re.match(inc, project_name):
                for exc in self.exclude_pats:
                    if re.match(exc.replace('+', '\\+'), project_name):
                        return False
                return True
        return False

    def project(self, proj_obj):
        '''Callback for the parser. Records the first matching project
        and indexes every matching project by name.'''
        if (not proj_obj.name in self.projects) and self.filters(proj_obj.name):
            if not self.first_project:
                self.first_project = proj_obj
            self.projects[proj_obj.name] = proj_obj
class DependencyGenerator(Unserializer):
    '''*DependencyGenerator* implements a breadth-first search of the project
    dependencies index with a specific twist.
    At each iteration, if all prerequisites for a project can be found
    on the local system, the dependency edge is cut from the next iteration.
    Missing prerequisite executables, headers and libraries require
    the installation of prerequisite projects as stated by the *missings*
    list of edges. The user will be prompted for *candidates*() and through
    the options available will choose to install prerequisites through
    compiling them out of a source controlled repository or a binary
    distribution package.
    *DependencyGenerator.end_parse*() is at the heart of the workspace
    bootstrapping and other "recurse" features.
    '''
    def __init__(self, repositories, packages, exclude_pats=None,
                 custom_steps=None, force_update=False):
        '''*repositories* will be installed from compiling
        a source controlled repository while *packages* will be installed
        from a binary distribution package.
        *exclude_pats* is a list of projects which should be removed from
        the final topological order.'''
        self.roots = packages + repositories
        Unserializer.__init__(self, self.roots, exclude_pats, custom_steps)
        # When True, an exception stops the recursive make and exits
        # with an error code; otherwise we move on to the next project.
        self.stop_make_after_error = False
        self.packages = set(packages)
        self.repositories = set(repositories)
        # Each entry maps a project name to an edge (color, depth, variant).
        self.active_prerequisites = {
            name: (name, 0, TargetStep(0, name))
            for name in repositories + packages}
        self.levels = {0: set(
            TargetStep(0, name) for name in repositories + packages)}
        # Vertices in the dependency tree
        self.vertices = {}
        self.force_update = force_update
def __str__(self):
return "vertices:\n%s" % str(self.vertices)
def connect_to(self, name, step):
if name in self.vertices:
self.vertices[name].prerequisites += [step]
def add_config_make(self, variant, configure, make, prerequisites):
config = None
config_name = ConfigureStep.genid(variant.project, variant.target)
if not config_name in self.vertices:
config = configure.associate(variant.target)
self.vertices[config_name] = config
else:
config = self.vertices[config_name]
make_name = BuildStep.genid(variant.project, variant.target)
if not make_name in self.vertices:
make = make.associate(variant.target)
make.force_update = self.force_update
self.vertices[make_name] = make
for prereq in prerequisites:
make.prerequisites += [prereq]
if config:
make.prerequisites += [config]
setup_name = SetupStep.genid(variant.project, variant.target)
self.connect_to(setup_name, make)
return self.vertices[make_name]
    def add_install(self, project_name, target=None):
        '''Inserts an install step for *project_name* into the dependency
        graph and returns it, reusing a previously inserted vertex when
        one exists.
        Depending on the index, the install step is either built from
        a package file (binary distribution), a configure+make sequence
        (patched sources) or delegated to the local package manager.
        When none applies, a placeholder *InstallStep* is inserted that
        raises when its *run* method is called.'''
        install_step = None
        # NOTE(review): assumes os.sep never appears in managed package
        # names, only in path-like project names — TODO confirm.
        managed_name = project_name.split(os.sep)[-1]
        install_name = InstallStep.genid(managed_name)
        if install_name in self.vertices:
            # We already decided to install this project, nothing more to add.
            return self.vertices[install_name]
        # We do not know the target at this point so we can't build a fully
        # qualified setup_name and index into *vertices* directly. Since we
        # are trying to install projects through the local package manager,
        # it is doubtful we should either know or care about the target.
        # That's a primary reason why target got somewhat slightly overloaded.
        # We used runtime="python" instead of target="python" in an earlier
        # design.
        setup_name = SetupStep.genid(project_name, target)
        if project_name in self.projects:
            project = self.projects[project_name]
            if CONTEXT.host() in project.packages:
                # Install from a pre-built package file for this host.
                filenames = []
                flavor = project.packages[CONTEXT.host()]
                for remote_path in flavor.update.fetches:
                    filenames += [CONTEXT.local_dir(remote_path)]
                install_step = create_package_file(project_name, filenames)
                update_s = self.add_update(project_name, flavor.update)
                if update_s:
                    install_step.prerequisites += [update_s]
            elif project.patch:
                # build and install from source
                flavor = project.patch
                prereqs = [] # XXX used to contain setup of prerequisites.
                update_s = self.add_update(
                    project_name, project.patch.update)
                if update_s:
                    prereqs += [update_s]
                install_step = self.add_config_make(
                    TargetStep(0, project_name, target),
                    flavor.configure, flavor.make, prereqs)
            else:
                # XXX Previously we picked the local package manager
                # before patched sources without checking if it is available.
                # Of course it created problems, yet we want to check existance
                # as late as possible so there was no way to decide
                # at this point.
                versions = None
                if setup_name in self.vertices:
                    versions = self.vertices[setup_name].versions
                install_step = create_managed(
                    managed_name, versions=versions, target=target)
        if not install_step:
            # Remove special case install_step is None; replace it with
            # a placeholder instance that will throw an exception
            # when the *run* method is called.
            install_step = InstallStep(project_name, target=target)
        if install_step:
            self.vertices[install_name] = install_step
            self.connect_to(setup_name, install_step)
        return install_step
def add_setup(self, target, deps):
targets = []
for dep in deps:
target_name = dep.target
if not dep.target:
target_name = target
cap = SetupStep.genid(dep.name)
if cap in self.custom_steps:
setup_class = self.custom_steps[cap]
else:
setup_class = SetupStep
setup = setup_class(dep.name, dep.files,
versions=dep.versions, target=target_name)
if not setup.name in self.vertices:
self.vertices[setup.name] = setup
else:
self.vertices[setup.name].add_prerequisites(setup)
targets += [self.vertices[setup.name]]
return targets
def add_update(self, project_name, update, update_rep=True):
update_name = UpdateStep.genid(project_name)
if update_name in self.vertices:
return self.vertices[update_name]
update_s = None
fetches = {}
if len(update.fetches) > 0:
# We could unconditionally add all source tarball since
# the *fetch* function will perform a *find_cache* before
# downloading missing files. Unfortunately this would
# interfere with *pub_configure* which checks there are
# no missing prerequisites whithout fetching anything.
fetches = find_cache(CONTEXT, update.fetches)
rep = None
if update_rep or not os.path.isdir(CONTEXT.src_dir(project_name)):
rep = update.rep
if update.rep or len(fetches) > 0:
update_s = UpdateStep(project_name, rep, fetches)
self.vertices[update_s.name] = update_s
return update_s
    def contextual_targets(self, variant):
        '''Hook overridden by concrete generators to return a
        (need_prompt, targets) pair for *variant*; see *end_parse*().'''
        raise Error("DependencyGenerator should not be instantiated directly")
def end_parse(self):
further = False
next_active_prerequisites = {}
for prereq_name in self.active_prerequisites:
# Each edge is a triplet source: (color, depth, variant)
# Gather next active Edges.
color = self.active_prerequisites[prereq_name][0]
depth = self.active_prerequisites[prereq_name][1]
variant = self.active_prerequisites[prereq_name][2]
next_depth = depth + 1
# The algorithm to select targets depends on the command semantic.
# The build, make and install commands differ in behavior there
# in the presence of repository, patch and package tags.
need_prompt, targets = self.contextual_targets(variant)
if need_prompt:
next_active_prerequisites[prereq_name] = (color, depth, variant)
else:
for target in targets:
further = True