Example #1
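Command-line check for a single module: run without arguments it lists every file the crawler indexed; given a module name it prints the files that still use that module even though the module is missing from their own dependency trace.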
import sys
from lib.legacy_deps import DependancyCrawler

src_root = '/usr/local/'
crawler = DependancyCrawler(src_root)
crawler.crawl()
if len(sys.argv) < 2:
    print('inspect legacy function usage of module, choose one of:')
    for module_name in crawler.get_files():
        print(module_name)
    sys.exit()

chk_source = sys.argv[1]

usage = crawler.where_used(chk_source)
for src_filename in usage:
    module_name = crawler.get_dependency_by_src(src_filename)
    trace_data = crawler.trace(module_name)
    if chk_source not in trace_data:
        print('inspect : %s (%s)' % (src_filename, ','.join(usage[src_filename])))
Example #2
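Batch job that crawls a source tree, reports the collected totals, and writes a Graphviz .dot file for every file whose dependency graph goes more than one level deep.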
import time
import os
import os.path
from lib.legacy_deps import DependancyCrawler

# set source and target directories
target_directory = '/tmp/legacy/'
src_root = '/usr/local/'

# create target directory if not existing
if not os.path.exists(target_directory):
    os.mkdir(target_directory)

# start crawling
crawler = DependancyCrawler(src_root)
print('[%.2f] started ' % time.time())
crawler.crawl()
print('[%.2f] collected %d dependencies in %d files' % (
    time.time(), crawler.get_total_dependencies(), crawler.get_total_files()))

# generate graphs
generated_files = list()
for filename in crawler.get_files():
    file_stats = crawler.file_info(filename)
    if file_stats['levels'] > 1:
        print('[%.2f] ... writing %s' % (time.time(), filename))
        dot_filename = ('%s/%s.dot' % (target_directory, filename)).replace(
            '//', '/')
        target_filename = dot_filename.replace('.dot', '.png')
        with open(dot_filename, 'w') as dot_file:
            dot_file.write(crawler.generate_dot(filename))
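Both graph-generation snippets stop right after the .dot file is written, leaving target_filename and generated_files unused. A minimal sketch of the presumably missing rendering step, assuming Graphviz's dot binary is on PATH (the subprocess call is an assumption, not taken from the original script):

import subprocess

# Editorial sketch: render the .dot file produced above to PNG with
# Graphviz and remember the output path. dot_filename, target_filename
# and generated_files come from the surrounding loop.
subprocess.check_call(['dot', '-Tpng', dot_filename, '-o', target_filename])
generated_files.append(target_filename)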
Example #3
import sys
from lib.legacy_deps import DependancyCrawler

src_root = '/usr/local/'
crawler = DependancyCrawler(src_root)
crawler.crawl()
if len(sys.argv) < 2:
    print('inspect legacy function usage of module, choose one of:')
    for module_name in crawler.get_files():
        print(module_name)
    sys.exit()

chk_source = sys.argv[1]

usage = crawler.where_used(chk_source)
for src_filename in usage:
    module_name = crawler.get_dependency_by_src(src_filename)
    trace_data = crawler.trace(module_name)
    if chk_source not in trace_data:
        print('inspect : %s (%s)' % (src_filename, ','.join(usage[src_filename])))
Example #4
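A second copy of the graph-generation job from Example #2, differing only in formatting; the rendering sketch above applies here as well.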
import time
import os
import os.path
from lib.legacy_deps import DependancyCrawler

# set source and target directories
target_directory = '/tmp/legacy/'
src_root = '/usr/local/'

# create target directory if not existing
if not os.path.exists(target_directory):
    os.mkdir(target_directory)

# start crawling
crawler = DependancyCrawler(src_root)
print('[%.2f] started ' % time.time())
crawler.crawl()
print('[%.2f] collected %d dependencies in %d files' % (
    time.time(), crawler.get_total_dependencies(), crawler.get_total_files()))

# generate graphs
generated_files = list()
for filename in crawler.get_files():
    file_stats = crawler.file_info(filename)
    if file_stats['levels'] > 1:
        print('[%.2f] ... writing %s' % (time.time(), filename))
        dot_filename = ('%s/%s.dot' % (target_directory, filename)).replace('//', '/')
        target_filename = dot_filename.replace('.dot', '.png')
        with open(dot_filename, 'w') as dot_file:
            dot_file.write(crawler.generate_dot(filename))