def test_get_logger_not_in_webkitpy(self):
        # Temporarily change the working directory so that we
        # can test get_logger() for a path outside of webkitpy.
        working_directory = os.getcwd()
        root_dir = "/"
        os.chdir(root_dir)

        logger = logutils.get_logger("/WebKitTools/Scripts/test-webkitpy")
        self.assertEquals(logger.name, "test-webkitpy")

        logger = logutils.get_logger("/WebKitTools/Scripts/test-webkitpy.py")
        self.assertEquals(logger.name, "test-webkitpy")

        os.chdir(working_directory)
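The assertions above suggest that, for a path outside of webkitpy, get_logger() names the logger after the script's basename with any ".py" suffix stripped. A minimal sketch of that mapping, written only as an illustration of the behavior the test checks, not WebKit's actual implementation:

import os

def _name_for_script(path):
    # Hypothetical helper: basename of the script, minus a trailing ".py".
    name = os.path.basename(path)
    if name.endswith(".py"):
        name = name[:-len(".py")]
    return name

# _name_for_script("/WebKitTools/Scripts/test-webkitpy.py") == "test-webkitpy"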
Example #2
def train_on_code_files(dataset_folder, vocab_size=10000):
    logger = logutils.get_logger(__name__)
    code_files = []
    for bug_folder in os.listdir(dataset_folder):
        if not os.path.isdir(os.path.join(dataset_folder, bug_folder)):
            continue
        before_folder = os.path.join(dataset_folder, bug_folder, 'before')
        for code_file in os.listdir(before_folder):
            if code_file.endswith('.py'):
                code_files.append(os.path.join(before_folder, code_file))
        after_folder = os.path.join(dataset_folder, bug_folder, 'after')
        for code_file in os.listdir(after_folder):
            if code_file.endswith('.py'):
                code_files.append(os.path.join(after_folder, code_file))
    code_files = ast_tokenize(code_files)
    logger.debug("{} code files.".format(len(code_files)))
    train(code_files, vocab_size=vocab_size)
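The loop above implies a dataset layout in which every bug folder contains 'before' and 'after' subfolders of .py files. A hedged usage sketch; the 'dataset' path and folder names below are made up for illustration:

# Assumed layout, inferred from the code above:
#   dataset/
#       bug-001/
#           before/   buggy .py files
#           after/    fixed .py files
train_on_code_files('dataset', vocab_size=10000)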
Example #3
def ast_tokenize(code_files):
    # Using AST to tokenize first to keep operators
    logger = logutils.get_logger(__name__)
    new_code_files = []
    for code_file in code_files:
        try:
            with open(code_file) as reader:
                code = reader.read()
            ast = parso.parse(code)
            tokens = []
            traverse(ast, tokens)
            new_code_file = os.path.join(os.path.dirname(code_file),
                                         os.path.basename(code_file).replace('.py', '.pyt').replace(' ', ''))
            new_code = ''.join(tokens)
            with open(new_code_file, 'w') as writer:
                writer.write(new_code)
            new_code_files.append(new_code_file)
        except OSError as err:
            logger.error('Failed to open code file: {}'.format(err))
        except UnicodeDecodeError as err:
            logger.error("Error when reading file: {}, {}".format(code_file, err))
    return new_code_files
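The traverse() helper used above is not shown in this example. A minimal sketch of what it might do, assuming parso's usual tree API (inner nodes expose .children, leaves expose .value); this is an illustration, not the original helper:

def traverse(node, tokens):
    # Recurse into inner nodes; collect the raw token text from leaves.
    # A trailing space keeps tokens separated when joined with ''.join().
    if hasattr(node, 'children'):
        for child in node.children:
            traverse(child, tokens)
    else:
        tokens.append(node.value + ' ')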
Example #4
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import copy
import json

from requests import Request
from requests import Session

from exc import get_http_exc
from logutils import format_request
from logutils import format_response
import logutils

logger = logutils.get_logger(__file__)


class ApiRequest(object):
    def __init__(self, url, data=None, method='get', auth=None):
        self.url = url
        self.data = data
        self.method = method
        self.auth = auth

        self.encoding = 'utf-8'
        self.skip_serialize = False

        self.standard_headers = {
            'Accept': 'application/json',
        }
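Only the constructor is shown here; a hypothetical construction call, with a made-up URL and credentials, might look like:

request = ApiRequest('https://api.example.com/v1/tenants',
                     data={'name': 'supertenant'},
                     method='post',
                     auth=('admin', 'secret'))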
Example #5
        dct = self.call_api(url)
        obj = resource.model_class.get_instance(_atts=dct)

        return obj


if __name__ == '__main__':
    import sys
    sys.path.append('.')
    from utils import get_api

    import logging
    from logutils import get_logger

    if '-v' in sys.argv:
        logger = get_logger(__file__)
        logger.setLevel(logging.DEBUG)

    api = get_api()
    t1 = api.get_tenant(name="supertenant")
    print t1
    g1 = api.get_group(tenant=t1, name="admins")
    #print api.all_user(tenant=t1)
    #u1.description = u'a'
    #api.update_user(u1)
    #print api.create_user(initial={'name': 'h', 'surname': 'a', 'username': '******', 'tenant': t1})

    data = api.get_user_privs(g1)
    api.put_user_privs(g1, data=data)
    print data
Example #6
def test_get_logger_in_webkitpy(self):
        logger = logutils.get_logger(__file__)
        self.assertEquals(logger.name, "webkitpy.init.logutils_unittest")
Example #7
"""A random collection of utilities and hacks around Python's annoyances"""
import inspect
import sys
import signal
import atexit
import traceback
import time
import re
import json
from os import path

from logutils import get_logger
log = get_logger()

class register_shutdown:
	"""
	Try to ensure a clean shutdown

	Tries to make sure the shutdown function is called in all
	normal* stoppage situations.
	
	
	If pass_args is True, the func gets passed whatever
	was given by e.g. the atexit callback or the signal callback, so
	probably the safest thing is to grab **kwargs.

	Unless dont_remove is true, if func returns a non-true value or raises
	an exception, it is removed from the shutdown registry.
	Without this, in e.g. a signal situation the func would first be called
	because of the signal and then, if the process shuts down cleanly, be
	called again by the exit handler. Also if the callback misbehaves
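The class body is cut off above, but the docstring is concrete enough for a minimal sketch of the registration it describes. The function below is an assumption for illustration, not the original class:

import atexit
import signal

def register_cleanup(func, pass_args=False):
    # Wrap func so it ignores callback arguments unless pass_args is True.
    def _run(*args, **kwargs):
        return func(*args, **kwargs) if pass_args else func()

    atexit.register(_run)  # normal interpreter exit
    # Termination signal; the handler receives (signum, frame).
    signal.signal(signal.SIGTERM, lambda signum, frame: _run(signum=signum, frame=frame))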
Example #8
def getGeminiLog(logLevel=None, logType='main'):
    log = logutils.get_logger('adutils-gemLog-Deprecated')
    return log
Example #10
from crawler import Crawler
from logutils import get_logger
from dbmanager import DBManager, DBType

from model_converter import *

logger = get_logger(__name__)


def main():
    crawler = Crawler()
    db = DBManager(DBType.SQLITE, 'github.db')

    rate_limit = crawler.get_rate_limit()
    logger.info("Rate Limit: %s Remaining: %s", rate_limit.limit,
                rate_limit.remaining)
    # repos = crawler.fetch_repositories_with_stars_in_range(10000, 100000)
    # logger.info(repos)
    repos = crawler.fetch_repositories_with_stars_in_range(100000, 200000)
    logger.info(repos)

    # Test the DB
    for repo in repos:
        db_repo = convert_api_repo_to_db(repo)
        db.session.add(db_repo)

    db.session.commit()


if __name__ == "__main__":
    main()