def process_repositories(self, repo_configs, ref, action, request_body): """Verify that the suggested repositories has matching settings and issue git pull and/or deploy commands.""" import os import time import logging from wrappers import GitWrapper from lock import Lock import json logger = logging.getLogger() data = json.loads(request_body) result = [] # Process each matching repository for repo_config in repo_configs: repo_result = {} try: # Verify that all filters matches the request (if any filters are specified) if 'filters' in repo_config: # At least one filter must match for filter in repo_config['filters']: # All options specified in the filter must match for filter_key, filter_value in filter.iteritems(): # Ignore filters with value None (let them pass) if filter_value == None: continue # Support for earlier version so it's non-breaking functionality if filter_key == 'action' and filter_value == action: continue # Interpret dots in filter name as path notations node_value = data for node_key in filter_key.split('.'): # If the path is not valid the filter does not match if not node_key in node_value: logger.info( "Filter '%s'' does not match since the path is invalid" % (filter_key)) raise FilterMatchError() node_value = node_value[node_key] if filter_value == node_value: continue # If the filter value is set to True. the filter # will pass regardless of the actual value if filter_value == True: continue logger.info( "Filter '%s'' does not match ('%s' != '%s')" % (filter_key, filter_value, (str(node_value)[:75] + '..') if len(str(node_value)) > 75 else str(node_value))) raise FilterMatchError() except FilterMatchError as e: # Filter does not match, do not process this repo config continue # In case there is no path configured for the repository, no pull will # be made. if not 'path' in repo_config: res = GitWrapper.deploy(repo_config) repo_result['deploy'] = res result.append(repo_result) continue # If the path does not exist, a warning will be raised and no pull or # deploy will be made. if not os.path.isdir(repo_config['path']): logger.error( "The repository '%s' does not exist locally. Make sure it was pulled " % repo_config['path'] + "properly without errors by reviewing the log.") result.append(repo_result) continue # If the path is not writable, a warning will be raised and no pull or # deploy will be made. if not os.access(repo_config['path'], os.W_OK): logger.error( "The path '%s' is not writable. Make sure that GAD has write access to that path." % repo_config['path']) result.append(repo_result) continue running_lock = Lock( os.path.join(repo_config['path'], 'status_running')) waiting_lock = Lock( os.path.join(repo_config['path'], 'status_waiting')) try: # Attempt to obtain the status_running lock while not running_lock.obtain(): # If we're unable, try once to obtain the status_waiting lock if not waiting_lock.has_lock() and not waiting_lock.obtain( ): logger.error( "Unable to obtain the status_running lock nor the status_waiting lock. Another process is " + "already waiting, so we'll ignore the request.") # If we're unable to obtain the waiting lock, ignore the request break # Keep on attempting to obtain the status_running lock until we succeed time.sleep(5) n = 4 res = None while n > 0: # Attempt to pull up a maximum of 4 times res = GitWrapper.pull(repo_config) repo_result['git pull'] = res # Return code indicating success? 
                if res == 0:
                    break

                n -= 1

            if 0 < n:
                res = GitWrapper.deploy(repo_config)
                repo_result['deploy'] = res

        #except Exception as e:
        #    logger.error('Error during \'pull\' or \'deploy\' operation on path: %s' % repo_config['path'])
        #    logger.error(e)
        #    raise e

        finally:

            # Release the lock if it's ours
            if running_lock.has_lock():
                running_lock.release()

            # Release the lock if it's ours
            if waiting_lock.has_lock():
                waiting_lock.release()

        result.append(repo_result)

    return result
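# The process_repositories() variants in this file raise and catch FilterMatchError as a
# control-flow signal without defining it here. A minimal sketch, assuming it is simply a
# plain Exception subclass defined elsewhere in the module:

class FilterMatchError(Exception):
    """Raised when a configured filter does not match the incoming webhook payload."""
    pass

# For illustration (hypothetical config and payload): a filter such as
#   {'repository.name': 'my-app', 'ref': 'refs/heads/master'}
# uses dots as path separators into the parsed JSON payload, so it matches a body like
#   {'repository': {'name': 'my-app'}, 'ref': 'refs/heads/master'}
# while a missing key or differing value raises FilterMatchError and skips that repo config.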
def process_repositories(self, repo_configs, ref, action, request_body):
    import os
    import time
    import logging
    from wrappers import GitWrapper
    from lock import Lock
    import json

    logger = logging.getLogger()
    data = json.loads(request_body)

    # Process each matching repository
    for repo_config in repo_configs:

        try:
            # Verify that all filters match the request (if any filters are specified)
            if 'filters' in repo_config:

                # at least one filter must match
                for filter in repo_config['filters']:

                    # all options specified in the filter must match
                    for filter_key, filter_value in filter.iteritems():

                        # support for earlier version so it's non-breaking functionality
                        if filter_key == 'action' and filter_value == action:
                            continue

                        if filter_key not in data or filter_value != data[filter_key]:
                            raise FilterMatchError()

        except FilterMatchError as e:
            # Filter does not match, do not process this repo config
            continue

        # In case there is no path configured for the repository, no pull will
        # be made.
        if not 'path' in repo_config:
            GitWrapper.deploy(repo_config)
            continue

        running_lock = Lock(os.path.join(repo_config['path'], 'status_running'))
        waiting_lock = Lock(os.path.join(repo_config['path'], 'status_waiting'))

        try:
            # Attempt to obtain the status_running lock
            while not running_lock.obtain():

                # If we're unable, try once to obtain the status_waiting lock
                if not waiting_lock.has_lock() and not waiting_lock.obtain():
                    logger.error("Unable to obtain the status_running lock nor the status_waiting lock. Another process is " +
                                 "already waiting, so we'll ignore the request.")

                    # If we're unable to obtain the waiting lock, ignore the request
                    break

                # Keep on attempting to obtain the status_running lock until we succeed
                time.sleep(5)

            n = 4
            while 0 < n and 0 != GitWrapper.pull(repo_config):
                n -= 1

            if 0 < n:
                GitWrapper.deploy(repo_config)

        except Exception as e:
            logger.error('Error during \'pull\' or \'deploy\' operation on path: %s' % repo_config['path'])
            logger.error(e)

        finally:

            # Release the lock if it's ours
            if running_lock.has_lock():
                running_lock.release()

            # Release the lock if it's ours
            if waiting_lock.has_lock():
                waiting_lock.release()
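# Every variant above uses the same two-lock handshake: hold status_running while working,
# and fall back to status_waiting so that at most one extra request queues up per repository.
# A minimal sketch of a file-based lock that would satisfy the obtain() / has_lock() /
# release() interface used here (an assumption for illustration, not the project's actual
# 'lock' module):

import os

class Lock(object):
    """Hypothetical file-based lock: obtain() atomically creates the lock file."""

    def __init__(self, path):
        self.path = path
        self._fd = None

    def obtain(self):
        try:
            # O_CREAT | O_EXCL makes creation atomic; failure means another process holds the lock
            self._fd = os.open(self.path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
            return True
        except OSError:
            return False

    def has_lock(self):
        return self._fd is not None

    def release(self):
        if self._fd is not None:
            os.close(self._fd)
            os.remove(self.path)
            self._fd = None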
def execute_webhook(self, repo_configs, request_headers, request_body, action):
    """Verify that the suggested repositories have matching settings and issue git pull and/or deploy commands."""
    import os
    import time
    import logging
    from wrappers import GitWrapper
    from lock import Lock
    import json

    logger = logging.getLogger()
    payload = json.loads(request_body)

    result = []

    # Process each matching repository
    for repo_config in repo_configs:

        repo_result = {}

        # In case there is no path configured for the repository, no pull will
        # be made.
        if not 'path' in repo_config:
            res = GitWrapper.deploy(repo_config)
            repo_result['deploy'] = res
            result.append(repo_result)
            continue

        # If the path does not exist, a warning will be raised and no pull or
        # deploy will be made.
        if not os.path.isdir(repo_config['path']):
            action.log_error("The repository '%s' does not exist locally. Make sure it was pulled properly without errors by reviewing the log." % repo_config['path'])
            result.append(repo_result)
            continue

        # If the path is not writable, a warning will be raised and no pull or
        # deploy will be made.
        if not os.access(repo_config['path'], os.W_OK):
            action.log_error("The path '%s' is not writable. Make sure that GAD has write access to that path." % repo_config['path'])
            result.append(repo_result)
            continue

        running_lock = Lock(os.path.join(repo_config['path'], 'status_running'))
        waiting_lock = Lock(os.path.join(repo_config['path'], 'status_waiting'))

        try:
            # Attempt to obtain the status_running lock
            while not running_lock.obtain():

                # If we're unable, try once to obtain the status_waiting lock
                if not waiting_lock.has_lock() and not waiting_lock.obtain():
                    action.log_error("Unable to obtain the status_running lock nor the status_waiting lock. Another process is already waiting, so we'll ignore the request.")

                    # If we're unable to obtain the waiting lock, ignore the request
                    break

                # Keep on attempting to obtain the status_running lock until we succeed
                time.sleep(5)

            n = 4
            res = None
            while n > 0:

                # Attempt to pull up a maximum of 4 times
                res = GitWrapper.pull(repo_config)
                repo_result['git pull'] = res

                # Return code indicating success?
                if res == 0:
                    break

                n -= 1

            if 0 < n:
                res = GitWrapper.deploy(repo_config)
                repo_result['deploy'] = res

        #except Exception as e:
        #    logger.error('Error during \'pull\' or \'deploy\' operation on path: %s' % repo_config['path'])
        #    logger.error(e)
        #    raise e

        finally:

            # Release the lock if it's ours
            if running_lock.has_lock():
                running_lock.release()

            # Release the lock if it's ours
            if waiting_lock.has_lock():
                waiting_lock.release()

        result.append(repo_result)

    return result
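# The pull/deploy section above retries 'git pull' up to four times and only deploys if a
# pull eventually returned exit code 0. A small standalone sketch of that pattern, with
# pull_fn and deploy_fn standing in for GitWrapper.pull and GitWrapper.deploy (hypothetical
# names used only for illustration):

def pull_then_deploy(repo_config, pull_fn, deploy_fn, attempts=4):
    """Retry pull_fn up to 'attempts' times; run deploy_fn only if a pull returned 0."""
    repo_result = {}
    n = attempts
    while n > 0:
        res = pull_fn(repo_config)
        repo_result['git pull'] = res
        if res == 0:
            break
        n -= 1
    # n is still positive only if a pull succeeded before the attempts ran out
    if n > 0:
        repo_result['deploy'] = deploy_fn(repo_config)
    return repo_result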
def process_repositories(self, repo_configs, ref, action, request_body): """Verify that the suggested repositories has matching settings and issue git pull and/or deploy commands.""" import os import time import logging from wrappers import GitWrapper from lock import Lock import json logger = logging.getLogger() data = json.loads(request_body) result = [] # Process each matching repository for repo_config in repo_configs: repo_result = {} try: # Verify that all filters matches the request (if any filters are specified) if 'filters' in repo_config: # At least one filter must match for filter in repo_config['filters']: # All options specified in the filter must match for filter_key, filter_value in filter.iteritems(): # Ignore filters with value None (let them pass) if filter_value == None: continue # Support for earlier version so it's non-breaking functionality if filter_key == 'action' and filter_value == action: continue # Interpret dots in filter name as path notations node_value = data for node_key in filter_key.split('.'): # If the path is not valid the filter does not match if not node_key in node_value: logger.info("Filter '%s'' does not match since the path is invalid" % (filter_key)) raise FilterMatchError() node_value = node_value[node_key] if filter_value == node_value: continue # If the filter value is set to True. the filter # will pass regardless of the actual value if filter_value == True: continue logger.info("Filter '%s'' does not match ('%s' != '%s')" % (filter_key, filter_value, (str(node_value)[:75] + '..') if len(str(node_value)) > 75 else str(node_value))) raise FilterMatchError() except FilterMatchError as e: # Filter does not match, do not process this repo config continue # In case there is no path configured for the repository, no pull will # be made. if not 'path' in repo_config: res = GitWrapper.deploy(repo_config) repo_result['deploy'] = res result.append(repo_result) continue running_lock = Lock(os.path.join(repo_config['path'], 'status_running')) waiting_lock = Lock(os.path.join(repo_config['path'], 'status_waiting')) try: # Attempt to obtain the status_running lock while not running_lock.obtain(): # If we're unable, try once to obtain the status_waiting lock if not waiting_lock.has_lock() and not waiting_lock.obtain(): logger.error("Unable to obtain the status_running lock nor the status_waiting lock. Another process is " + "already waiting, so we'll ignore the request.") # If we're unable to obtain the waiting lock, ignore the request break # Keep on attempting to obtain the status_running lock until we succeed time.sleep(5) n = 4 res = None while n > 0: # Attempt to pull up a maximum of 4 times print repo_config if not repo_config.get('branch'): if data.get('ref_type') == "tag": repo_config.update( {'tag': data.get('ref')}) elif '/' in data.get('ref', ''): repo_config.update( {'branch': data.get('ref').split('/')[-1]}) elif data.get('pull_request'): repo_config.update( {'branch': data.get( 'pull_request').get('base').get('ref')}) print repo_config res = GitWrapper.pull(repo_config) repo_result['git pull'] = res print 'pull done!' # Return code indicating success? 
                if res == 0:
                    break

                n -= 1

            if 0 < n:
                res = GitWrapper.deploy(repo_config)
                repo_result['deploy'] = res

        except Exception as e:
            logger.error('Error during \'pull\' or \'deploy\' operation on path: %s' % repo_config['path'])
            logger.error(e.message)

        finally:

            # Release the lock if it's ours
            if running_lock.has_lock():
                running_lock.release()

            # Release the lock if it's ours
            if waiting_lock.has_lock():
                waiting_lock.release()

        result.append(repo_result)

    return result
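# The last variant above infers which branch or tag to pull from the webhook payload when the
# repo config does not pin one. A standalone sketch of that inference, with hypothetical
# sample payloads (field names follow common GitHub-style webhook bodies):

def infer_ref(repo_config, data):
    """Return a copy of repo_config with 'branch' or 'tag' filled in from the payload."""
    config = dict(repo_config)
    if not config.get('branch'):
        if data.get('ref_type') == 'tag':
            # e.g. a "create" event for a tag: {'ref_type': 'tag', 'ref': 'v1.2.0'}
            config['tag'] = data.get('ref')
        elif '/' in data.get('ref', ''):
            # e.g. a push event: {'ref': 'refs/heads/master'} -> branch 'master'
            config['branch'] = data.get('ref').split('/')[-1]
        elif data.get('pull_request'):
            # e.g. a pull request event: use the base branch of the PR
            config['branch'] = data.get('pull_request').get('base').get('ref')
    return config

# Example: infer_ref({'path': '/srv/app'}, {'ref': 'refs/heads/master'})
# returns {'path': '/srv/app', 'branch': 'master'}.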