예제 #1
0
def read_last_log(compile_type):
    """Load the most recent JSON log for the given compile type.

    compile_type: 'library', 'fetch' or 'target_library'; any other value
        selects the default cb_compile_tester logs.

    Returns a dict ``{'log': parsed-json-or-None, 'timestamp': str-or-None}``.
    """
    # Map each compile type to the filename pattern of its log files.
    patterns = {
        'library': r'.+libraries_test.+',
        'fetch': r'.+libraries_fetch.+',
        'target_library': r'.+target_libraries.+',
    }
    logs_re = re.compile(patterns.get(compile_type, r'.+cb_compile_tester.+'))

    # Sorted filenames order chronologically: names start with a timestamp.
    logs = sorted(x for x in os.listdir(get_path('logs'))
                  if x != '.gitignore' and logs_re.match(x))

    log_timestamp_re = re.compile(
        r'(\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2})-.+\.json')
    log = None
    timestamp = None
    if logs:
        log = logs[-1]
        # Guard the match: a log filename without the expected timestamp
        # prefix would otherwise raise AttributeError on .group(1).
        match = log_timestamp_re.match(log)
        if match:
            timestamp = match.group(1)

    last_log = None
    if log:
        with open(get_path('logs', log)) as f:
            last_log = simplejson.loads(f.read())

    return {'log': last_log, 'timestamp': timestamp}
예제 #2
0
def read_last_log(compile_type):
    """Load the most recent JSON log for the given compile type.

    compile_type: 'library', 'fetch' or 'target_library'; any other value
        selects the default cb_compile_tester logs.

    Returns a dict ``{'log': parsed-json-or-None, 'timestamp': str-or-None}``.
    """
    # Map each compile type to the filename pattern of its log files.
    patterns = {
        'library': r'.+libraries_test.+',
        'fetch': r'.+libraries_fetch.+',
        'target_library': r'.+target_libraries.+',
    }
    logs_re = re.compile(patterns.get(compile_type, r'.+cb_compile_tester.+'))

    # Sorted filenames order chronologically: names start with a timestamp.
    logs = sorted(x for x in os.listdir(get_path('logs'))
                  if x != '.gitignore' and logs_re.match(x))

    log_timestamp_re = re.compile(r'(\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2})-.+\.json')
    log = None
    timestamp = None
    if logs:
        log = logs[-1]
        # Guard the match: a log filename without the expected timestamp
        # prefix would otherwise raise AttributeError on .group(1).
        match = log_timestamp_re.match(log)
        if match:
            timestamp = match.group(1)

    last_log = None
    if log:
        with open(get_path('logs', log)) as f:
            last_log = simplejson.loads(f.read())

    return {
        'log': last_log,
        'timestamp': timestamp
    }
예제 #3
0
    def __init__(self, log_time):
        """Initialize Disqus credentials, SSO key, API client, and caches."""
        self.log_time = log_time

        # API credentials come straight from the environment; unset ones
        # stay None.
        self.DISQUS_API_SECRET = os.getenv('DISQUS_API_SECRET')
        self.DISQUS_API_PUBLIC = os.getenv('DISQUS_API_PUBLIC')
        self.DISQUS_ACCESS_TOKEN = os.getenv('DISQUS_ACCESS_TOKEN')

        # SSO identity used to build the remote-auth payload.
        self.user = {
            key: os.getenv('DISQUS_SSO_' + key.upper())
            for key in ('id', 'username', 'email')
        }
        self.SSO_KEY = self.get_disqus_sso(self.user)

        self.disqus = disqusapi.DisqusAPI(
            api_secret=self.DISQUS_API_SECRET,
            public_key=self.DISQUS_API_PUBLIC,
            remote_auth=self.SSO_KEY,
        )

        # Mutable state maintained while processing posts.
        self.change_log = {}
        self.last_post = None
        self.last_library = None

        # Preload canned comment messages and the examples-without-library DB.
        with open(get_path('data', DISQUS_COMMENTS)) as f:
            self.messages = simplejson.load(f)

        with open(get_path('data', EXAMPLES_WITHOUT_LIBRARY_DB)) as f:
            self.examples_without_library = simplejson.load(f)
예제 #4
0
    def __init__(self, log_time):
        """Build the Disqus client: env credentials, SSO key, data caches."""
        self.log_time = log_time

        # Pull the three API credentials from the environment (None if unset).
        self.DISQUS_API_SECRET = os.getenv('DISQUS_API_SECRET')
        self.DISQUS_API_PUBLIC = os.getenv('DISQUS_API_PUBLIC')
        self.DISQUS_ACCESS_TOKEN = os.getenv('DISQUS_ACCESS_TOKEN')

        # SSO user record used to compute the remote-auth key.
        self.user = {}
        for field in ('id', 'username', 'email'):
            self.user[field] = os.getenv('DISQUS_SSO_%s' % field.upper())
        self.SSO_KEY = self.get_disqus_sso(self.user)

        self.disqus = disqusapi.DisqusAPI(
            api_secret=self.DISQUS_API_SECRET,
            public_key=self.DISQUS_API_PUBLIC,
            remote_auth=self.SSO_KEY,
        )

        # Mutable bookkeeping updated while processing posts.
        self.change_log = {}
        self.last_post = None
        self.last_library = None

        # Cached data files read once at construction time.
        with open(get_path('data', DISQUS_COMMENTS)) as f:
            self.messages = simplejson.load(f)

        with open(get_path('data', EXAMPLES_WITHOUT_LIBRARY_DB)) as f:
            self.examples_without_library = simplejson.load(f)
예제 #5
0
def report_creator(compile_type, log_entry, log_file):
    """Creates a report json after each compile.
    `logs`: a list in which all log files located in logs directory are added.
    `logs_to_examine`: the list of all log files located in logs directory sorted.
    `tail`: the two most recent logs in logs directory.
    `diff`: a dictionary where all differences between the two logs are stored.
    `changes`: a counter indicating the number of differences found between the two logs.
    """

    logs = os.listdir(get_path('logs'))

    logs_re = re.compile(r'.+cb_compile_tester.+')
    if compile_type == 'library':
        logs_re = re.compile(r'.+libraries_test.+')
    elif compile_type == 'fetch':
        logs_re = re.compile(r'.+libraries_fetch.+')
    elif compile_type == 'target_library':
        logs_re = re.compile(r'.+target_libraries.+')

    logs = sorted([x for x in logs if logs_re.match(x)])
    tail = logs[-2:]

    # Opens the last, or the last two log files and gathers all their contents.
    logs_to_examine = []
    for log in tail:
        try:
            with open(get_path('logs', log)) as f:
                logs_to_examine.append(simplejson.loads(f.read()))
        except:
            print 'Log:', log, 'not found'

    diff = {}
    changes = 0

    # We have only one log file, it is the first time we run the test.
    if len(logs_to_examine) == 1:
        diff = logs_to_examine[0]
        changes += 1

    # We have more than one log files, it is not the first time we run the test.
    if len(logs_to_examine) >= 2:

        old_log = logs_to_examine[0]
        new_log = logs_to_examine[1]

        #Iterate over all new_log keys (urls).
        for url in new_log.keys():

            # Check if key (url) is included in `old_log`. If not, add an entry to `diff` dictionary.
            if url not in old_log:
                diff[url] = new_log[url]
                changes += 1
                continue
            """Check if log comes from test test_libraries_fetch.py test.
            If yes, we check if the `old_log[url]`value is the same with
            `new_log[url]`value. If not, add an entry to `diff` dictionary."""
            if compile_type == 'fetch':
                if old_log[url] != new_log[url]:
                    diff[url] = new_log[url]
                    changes += 1
                continue
            """Iterate over all `new_log[url]` keys. Keys can have one of the following
            values: 'success', 'fail', 'open_fail', 'error', 'comment'."""
            for result in new_log[url].keys():
                """Check if for the specific url, result is included in
                `old_log[url]` keys. If not, check if specific url has an entry in
                `diff` dictionary and if not create one. Then add the `result` value.
                e.g. `result`: success
                     `old_log[url].keys()`: ['fail', 'success']"""

                if result not in old_log[url].keys():
                    if url not in diff:
                        diff[url] = {}
                    diff[url][result] = new_log[url][result]
                    changes += 1
                    continue

                # Check if for the specific url, the result is `comment` or `open_fail` or `error`.
                if result == 'comment' or result == 'open_fail' or result == 'error':
                    # Check if the value for the specific result is the same in both logs.
                    if old_log[url][result] != new_log[url][result]:
                        # Check if the url is on diff dictionary, if not I add it.
                        if url not in diff:
                            diff[url] = {}
                        diff[url][result] = new_log[url][result]
                        changes += 1

                # Check if for the specific url, the result is `success` or `fail`.
                elif result == 'success' or result == 'fail':
                    for board in new_log[url][result]:
                        if board not in old_log[url][result]:
                            if url not in diff:
                                diff[url] = {}
                            if result not in diff[url]:
                                diff[url][result] = []
                            diff[url][result].append(board)
                            changes += 1

    #Create report and write the results.
    filename_tokens = os.path.basename(log_file).split('.')
    filename = '.'.join(filename_tokens[0:-1])
    extension = filename_tokens[-1]
    filename = 'report_' + filename + '_' + str(changes) + '.' + extension
    path = get_path('reports', filename)
    with open(path, 'w') as f:
        f.write(jsondump(diff))
예제 #6
0
# Maximum test runtime inside SauceLabs: the 3-hour hard cap (10800 s)
# minus a 30-minute safety margin, i.e. 2.5 hours.
SAUCELABS_TIMEOUT_SECONDS = 10800 - 1800

# Upper bound on compile requests per minute (see throttle_compile below).
COMPILES_PER_MINUTE = 10


def throttle_compile():
    """Sleep long enough to keep compiles at or below COMPILES_PER_MINUTE.

    The sleep length is jittered uniformly over a one-second window so
    parallel runs do not fire in lockstep.
    """
    # Renamed locals: the originals shadowed the builtins `min`/`max`.
    low = 60 / COMPILES_PER_MINUTE
    high = low + 1
    time.sleep(random.uniform(low, high))


# Board database, loaded once at import time from the data directory.
BOARDS_FILE = 'boards_db.json'
BOARDS_PATH = get_path('data', BOARDS_FILE)
with open(BOARDS_PATH) as f:
    BOARDS_DB = simplejson.loads(f.read())


def read_last_log(compile_type):
    """Collect and sort the log filenames for `compile_type`.

    NOTE(review): this copy appears truncated — it builds the sorted
    `logs` list but never selects or returns an entry; compare the
    complete versions of this function elsewhere in this file.
    """
    logs = os.listdir(get_path('logs'))
    # Default pattern: generic compile-tester logs; specific compile
    # types override it below.
    logs_re = re.compile(r'.+cb_compile_tester.+')
    if compile_type == 'library':
        logs_re = re.compile(r'.+libraries_test.+')
    elif compile_type == 'fetch':
        logs_re = re.compile(r'.+libraries_fetch.+')
    elif compile_type == 'target_library':
        logs_re = re.compile(r'.+target_libraries.+')

    # Keep only matching log files; sorted names order chronologically
    # because filenames start with a timestamp.
    logs = sorted([x for x in logs if x != '.gitignore' and logs_re.match(x)])
예제 #7
0
def report_creator(compile_type, log_entry, log_file):
    """Creates a report json after each compile.
    `logs`: a list in which all log files located in logs directory are added.
    `logs_to_examine`: the list of all log files located in logs directory sorted.
    `tail`: the two most recent logs in logs directory.
    `diff`: a dictionary where all differences between the two logs are stored.
    `changes`: a counter indicating the number of differences found between the two logs.
    """

    logs = os.listdir(get_path('logs'))

    logs_re = re.compile(r'.+cb_compile_tester.+')
    if compile_type == 'library':
        logs_re = re.compile(r'.+libraries_test.+')
    elif compile_type == 'fetch':
        logs_re = re.compile(r'.+libraries_fetch.+')
    elif compile_type == 'target_library':
        logs_re = re.compile(r'.+target_libraries.+')

    logs = sorted([x for x in logs if logs_re.match(x)])
    tail = logs[-2:]

    # Opens the last, or the last two log files and gathers all their contents.
    logs_to_examine = []
    for log in tail:
        try:
            with open(get_path('logs', log)) as f:
                logs_to_examine.append(simplejson.loads(f.read()))
        except:
            print 'Log:', log, 'not found'

    diff = {}
    changes = 0

    # We have only one log file, it is the first time we run the test.
    if len(logs_to_examine) == 1:
        diff = logs_to_examine[0]
        changes += 1

    # We have more than one log files, it is not the first time we run the test.
    if len(logs_to_examine) >= 2:

        old_log = logs_to_examine[0]
        new_log = logs_to_examine[1]

        #Iterate over all new_log keys (urls).
        for url in new_log.keys():

            # Check if key (url) is included in `old_log`. If not, add an entry to `diff` dictionary.
            if url not in old_log:
                diff[url] = new_log[url]
                changes += 1
                continue

            """Check if log comes from test test_libraries_fetch.py test.
            If yes, we check if the `old_log[url]`value is the same with
            `new_log[url]`value. If not, add an entry to `diff` dictionary."""
            if compile_type == 'fetch':
                if old_log[url] != new_log[url]:
                    diff[url] = new_log[url]
                    changes += 1
                continue

            """Iterate over all `new_log[url]` keys. Keys can have one of the following
            values: 'success', 'fail', 'open_fail', 'error', 'comment'."""
            for result in new_log[url].keys():

                """Check if for the specific url, result is included in
                `old_log[url]` keys. If not, check if specific url has an entry in
                `diff` dictionary and if not create one. Then add the `result` value.
                e.g. `result`: success
                     `old_log[url].keys()`: ['fail', 'success']"""

                if result not in old_log[url].keys():
                    if url not in diff:
                        diff[url] = {}
                    diff[url][result] = new_log[url][result]
                    changes += 1
                    continue

                # Check if for the specific url, the result is `comment` or `open_fail` or `error`.
                if result == 'comment' or result == 'open_fail' or result == 'error':
                    # Check if the value for the specific result is the same in both logs.
                    if old_log[url][result] != new_log[url][result]:
                        # Check if the url is on diff dictionary, if not I add it.
                        if url not in diff:
                            diff[url] = {}
                        diff[url][result] = new_log[url][result]
                        changes += 1

                # Check if for the specific url, the result is `success` or `fail`.
                elif result == 'success' or result == 'fail':
                    for board in new_log[url][result]:
                        if board not in old_log[url][result]:
                            if url not in diff:
                                diff[url] = {}
                            if result not in diff[url]:
                                diff[url][result] = []
                            diff[url][result].append(board)
                            changes += 1

    #Create report and write the results.
    filename_tokens = os.path.basename(log_file).split('.')
    filename = '.'.join(filename_tokens[0:-1])
    extension = filename_tokens[-1]
    filename = 'report_' + filename + '_' + str(changes) + '.' + extension
    path = get_path('reports', filename)
    with open(path, 'w') as f:
        f.write(jsondump(diff))
예제 #8
0
# Editor banner strings checked after a verify/compile attempt.
VERIFICATION_SUCCESSFUL_MESSAGE_EDITOR = 'Verification successful!'
VERIFICATION_FAILED_MESSAGE_EDITOR = 'Verification failed!'

# Maximum test runtime inside SauceLabs: the 3-hour hard cap (10800 s)
# minus a 30-minute safety margin, i.e. 2.5 hours.
SAUCELABS_TIMEOUT_SECONDS = 10800 - 1800

# Upper bound on compile requests per minute (see throttle_compile below).
COMPILES_PER_MINUTE = 10
def throttle_compile():
    """Sleep long enough to keep compiles at or below COMPILES_PER_MINUTE.

    The sleep length is jittered uniformly over a one-second window so
    parallel runs do not fire in lockstep.
    """
    # Renamed locals: the originals shadowed the builtins `min`/`max`.
    low = 60 / COMPILES_PER_MINUTE
    high = low + 1
    time.sleep(random.uniform(low, high))

# Board database, loaded once at import time from the data directory.
BOARDS_FILE = 'boards_db.json'
BOARDS_PATH = get_path('data', BOARDS_FILE)
with open(BOARDS_PATH) as f:
    BOARDS_DB = simplejson.loads(f.read())

def read_last_log(compile_type):
    """Collect and sort the log filenames for `compile_type`.

    NOTE(review): this copy appears truncated — it builds the sorted
    `logs` list but never selects or returns an entry; compare the
    complete versions of this function elsewhere in this file.
    """
    logs = os.listdir(get_path('logs'))
    # Default pattern: generic compile-tester logs; specific compile
    # types override it below.
    logs_re = re.compile(r'.+cb_compile_tester.+')
    if compile_type == 'library':
        logs_re = re.compile(r'.+libraries_test.+')
    elif compile_type == 'fetch':
        logs_re = re.compile(r'.+libraries_fetch.+')
    elif compile_type == 'target_library':
        logs_re = re.compile(r'.+target_libraries.+')

    # Keep only matching log files; sorted names order chronologically
    # because filenames start with a timestamp.
    logs = sorted([x for x in logs if x != '.gitignore' and logs_re.match(x)])