def _checkDockerImage(self):
    """Abort the daemon if the required Docker image is not available.

    Delegates the actual check to checkRequirements() and converts a
    DockerImageMissingException into a logged error plus process exit
    with E_DOCKER_IMAGE_MISSING.
    """
    try:
        self.checkRequirements()
    except DockerImageMissingException as error:
        log.error(error.message)
        sys.exit(E_DOCKER_IMAGE_MISSING)
def _preperation(self):
    """Loads all settings and prepares the daemon

    Copies the relevant CLI arguments onto the instance, initialises
    runtime state (IRC bot, log server, auth, job bookkeeping), creates
    the locks used for thread-safe access, and loads the daemon, IRC
    and auto-build configuration files. Exits the process with
    E_INVALID_GIT_REPO if a project in the auto-build config points at
    an invalid Git repository.
    """
    # CLI switches controlling optional subsystems
    self._useirc = self._args["use_irc"]
    self._noout = self._args["no_output"]
    self._debug = self._args["debug"]
    self._logserver = self._args["use_logserver"]
    self._logserver_ip = self._args["logserver_ip"]
    self._logserver_port = self._args["logserver_port"]

    # runtime state, populated later during daemon startup
    self._irc_config = {}
    self._ircbot = None
    self._logserver_httpd = None
    self._hostname = gethostname()
    self._auth = DaemonAuth(DAEMON_AUTH_PATH)

    # shared job bookkeeping, guarded by _daemon_info_lock below
    self._daemon_info = {
        "jobs": [],
        "scheduled_builds": 0,
        "running_builds": 0
    }

    # create locks for thread-safe communications
    self._irclock = threading.Lock()
    self._daemon_info_lock = threading.Lock()

    # load daemon settings
    self.loadDaemonSettings()

    # load irc bot config
    self.loadIRCBotConfig()

    # load auto build configuration file (CLI argument wins over the
    # value configured in the dapsenv config file)
    if self._args["autobuild_config"]:
        self._autoBuildConfigFile = self._args["autobuild_config"]
    else:
        self._autoBuildConfigFile = configmanager.get_prop(
            "daps_autobuild_config")

    self.autoBuildConfig = self.loadAutoBuildConfig(
        self._autoBuildConfigFile)

    # fetch all projects
    try:
        self.projects = self.autoBuildConfig.fetchProjects()
    except GitInvalidRepoException as e:
        log.error("Configuration error in auto build config '%s'! %s", \
            self._autoBuildConfigFile, e.message)
        sys.exit(E_INVALID_GIT_REPO)
def execute(self, args):
    """@see Action.execute()

    Stores the parsed CLI arguments, validates that at least one build
    target was supplied, then drives the websocket client coroutine to
    completion and propagates any error code via sys.exit().
    """
    self._args = args
    self._ip = args["ip"]
    self._port = args["port"]
    self._dc_files = args["dcfiles"]
    self._projects = args["projects"]
    self._error = 0

    # at least one of --dcfiles / --projects must be given
    if not (self._dc_files or self._projects):
        log.error("You must specify either --projects or --dcfiles!")
        sys.exit(E_INVALID_CLI)

    asyncio.get_event_loop().run_until_complete(self.start_client())

    if self._error:
        sys.exit(self._error)
def generate_config(self, path):
    """It generates a configuration file at the desired location

    :param string path: The path where the config file should be generated
    """
    try:
        configmanager.generate_config(path, self._args["force"])
    except ConfigFileAlreadyExistsException as err:
        log.error("There is already a file called 'dapsenv.conf' in the directory '%s'. " \
            "Use --force to overwrite that file.", err.path)
        sys.exit(E_CONFIG_FILE_ALREADY_CREATED)
    except ConfigFileCreationPermissionErrorException as err:
        log.error("Could not create config file at '%s'. Permission denied.",
                  err.path)
        sys.exit(E_CONFIG_FILE_PERMISSION_DENIED)
    else:
        # only reached on success -- the except branches terminate the process
        print("The configuration file got successfully created at: {}".format(
            path))
def start_client(self):
    """Fetch and print a build log from the API server.

    Connects to the API server via websocket, requests the build log
    (packet id 4) for self._dc_file / self._format_name, and prints the
    base64-decoded log. Sets self._error to a non-zero error code on
    failure instead of raising.
    """
    try:
        ws = yield from websockets.connect("ws://{}:{}/".format(self._ip, self._port))

        # request status information packet
        yield from ws.send(json.dumps({
            "id": 4,
            "dc_file": self._dc_file,
            "format": self._format_name
        }))

        # fetch server message
        res = yield from ws.recv()

        try:
            res = json.loads(res)

            if "error" in res:
                sys.stderr.write(red("Error: {}\n".format(res["error"])))
            else:
                # server sends the log base64 encoded
                print(b64decode(res["log"]).decode("ascii"))
        except ValueError:
            log.error("Invalid data received from API server.")
            self._error = E_API_SERVER_INVALID_DATA_SENT
    except (ConnectionRefusedError, gaierror, OSError) as e:
        # e.strerror can be None (e.g. for a plain OSError), which would
        # make the substring tests below raise a TypeError - guard it.
        strerror = e.strerror or ""
        if "Connect call failed" in strerror or "Name or service not known" in strerror:
            log.error("Connection to API server failed. Check if the IP address and the " \
                "port are correct and if the firewall port is open.")
            self._error = E_API_SERVER_CONN_FAILED
    except websockets.exceptions.ConnectionClosed:
        log.error("The API server has closed the connection.")
        self._error = E_API_SERVER_CLOSED_CONNECTION
def start_client(self):
    """Schedule builds for the given DC files and/or projects.

    Sends an authenticated build request (packet id 2) to the API
    server and prints, per DC file and per project, whether a build was
    scheduled or the name was unknown. Sets self._error to a non-zero
    error code on failure instead of raising.
    """
    try:
        ws = yield from websockets.connect("ws://{}:{}/".format(
            self._ip, self._port))

        # normalize unset CLI options to empty lists for the request
        if not self._dc_files:
            self._dc_files = []

        if not self._projects:
            self._projects = []

        # request status information packet
        yield from ws.send(
            json.dumps({
                "id": 2,
                "token": getToken(),
                "dc_files": self._dc_files,
                "projects": self._projects
            }))

        # fetch server message
        res = yield from ws.recv()

        try:
            res = json.loads(res)

            if "error" in res:
                sys.stderr.write(red("Error: {}\n".format(res["error"])))
            else:
                # report per DC file whether the server found it
                for dc_file in self._dc_files:
                    if dc_file in res["dc_files"]:
                        print("Build request scheduled for DC-File '{}'.".
                              format(dc_file))
                    else:
                        sys.stderr.write(red("Error: Could not find DC-File '{}' in any " \
                            "projects.\n".format(dc_file)))

                # report per project whether the server knows it
                for project in self._projects:
                    if project in res["projects"]:
                        print("Build request scheduled for project '{}'.".
                              format(project))
                    else:
                        sys.stderr.write(
                            red("Error: Invalid project name '{}'.\n".
                                format(project)))
        except ValueError:
            log.error("Invalid data received from API server.")
            self._error = E_API_SERVER_INVALID_DATA_SENT
    except (ConnectionRefusedError, gaierror, OSError) as e:
        # e.strerror can be None (e.g. for a plain OSError), which would
        # make the substring tests below raise a TypeError - guard it.
        strerror = e.strerror or ""
        if "Connect call failed" in strerror or "Name or service not known" in strerror:
            log.error("Connection to API server failed. Check if the IP address and the " \
                "port are correct and if the firewall port is open.")
            self._error = E_API_SERVER_CONN_FAILED
    except websockets.exceptions.ConnectionClosed:
        log.error("The API server has closed the connection.")
        self._error = E_API_SERVER_CLOSED_CONNECTION
def __init__(self, file_name):
    """Report an unreadable config file and terminate the process.

    :param string file_name: path of the config file that could not be read
    """
    log.error("Could not access config file '%s'! Please check the permissions.",
              file_name)
    sys.exit(E_CONFIG_FILE_PERMISSION_DENIED)
def _prepare_build_task(self):
    """Goes through all specified repositories and updates those

    Pulls each project's branch, and when the branch head moved,
    determines which DC files are affected by the changed files and
    appends a build job for each of them to self._daemon_info["jobs"]
    (guarded by self._daemon_info_lock).
    """
    for i in self.projects:
        # pull new commits into repository
        self.projects[i]["repo"].pull(self.projects[i]["vcs_branch"], force=True)

        # fetch current commit hash from branch
        commit = self.projects[i]["repo"].getLastCommitHash(
            self.projects[i]["vcs_branch"])

        # check if the last commit hash got changed
        if self.projects[i]["vcs_lastrev"] != commit:
            old_commit = self.projects[i]["vcs_lastrev"]

            # update to the new commit hash
            self.projects[i]["vcs_lastrev"] = commit
            self.autoBuildConfig.updateCommitHash(
                self.projects[i]["project"], commit)

            # get changed files; a GitErrorException leaves the list
            # empty on purpose (best effort)
            changed_files = []
            try:
                changed_files = self.projects[i][
                    "repo"].getChangedFilesBetweenCommits(
                        old_commit, commit)
            except GitErrorException:
                pass

            # determine assigned DC file for each changed file
            for dc_file, dc_object in self.projects[i]["dc_files"].items():
                build = False

                if dc_object.rootid:
                    try:
                        assigned_files = xslt.getAllUsedFiles(
                            "{}/xml/{}".format(
                                self.projects[i]["vcs_repodir"],
                                dc_object.main), dc_object.rootid)

                        # is at least one element from "changed_files" in "assigned_files"
                        # BUG FIX: the previous code assigned a lambda and
                        # tested the lambda object itself ("if res:"), which
                        # is always truthy - every DC file with a rootid was
                        # rebuilt unconditionally. Evaluate the membership
                        # test directly instead.
                        if any(f in assigned_files for f in changed_files):
                            build = True
                    except InvalidRootIDException:
                        log.error(
                            "Invalid root id in main file '{}' of DC File '{}' specified. Repository: {}"
                            .format(dc_object.main, dc_file,
                                    self.projects[i]["vcs_repodir"]))
                else:
                    # without a rootid we cannot narrow down the affected
                    # files, so any change triggers a build
                    build = True

                if build:
                    # job bookkeeping is shared with other threads
                    with self._daemon_info_lock:
                        self._daemon_info["jobs"].append({
                            "project": copy.copy(self.projects[i]),
                            "dc_file": dc_file,
                            "commit": commit,
                            "status": 0,
                            "container_id": "",
                            "time_started": 0
                        })
                        self._daemon_info["scheduled_builds"] += 1
def start_client(self):
    """Query the API server for daemon status and print it.

    Sends a status request (packet id 1) and renders running and
    scheduled jobs as PrettyTable tables. Sets self._error to a
    non-zero error code on failure instead of raising.
    """
    try:
        ws = yield from websockets.connect("ws://{}:{}/".format(
            self._ip, self._port))

        # request status information packet
        yield from ws.send(json.dumps({"id": 1}))

        # fetch server message
        res = yield from ws.recv()

        try:
            res = json.loads(res)

            print("Received status information from API server: {}:{}\n".
                  format(self._ip, self._port))
            print("Running Builds:\t\t{}".format(res["running_builds"]))
            print("Scheduled Builds:\t{}".format(res["scheduled_builds"]))

            table_running = PrettyTable(
                ["Project", "DC-File", "Branch", "Commit", "Started"])
            table_scheduled = PrettyTable(
                ["Project", "DC-File", "Branch", "Commit"])

            for job in res["jobs"]:
                # append only running builds
                if job["status"]:
                    table_running.add_row([
                        job["project"], job["dc_file"], job["branch"],
                        job["commit"][:24],
                        datetime.fromtimestamp(
                            job["time_started"]).strftime(
                                "%m/%d/%Y %H:%M:%S")
                    ])
                else:
                    table_scheduled.add_row([
                        job["project"], job["dc_file"], job["branch"],
                        job["commit"][:24]
                    ])

            if res["running_builds"]:
                print("\nCurrent Running Jobs:")
                print(table_running)
                print()

            if res["scheduled_builds"]:
                if res["running_builds"]:
                    print("Scheduled Jobs:")
                else:
                    print("\nScheduled Jobs:")

                print(table_scheduled)
                print()
        except ValueError:
            log.error("Invalid data received from API server.")
            self._error = E_API_SERVER_INVALID_DATA_SENT
    except (ConnectionRefusedError, gaierror, OSError) as e:
        # e.strerror can be None (e.g. for a plain OSError), which would
        # make the substring tests below raise a TypeError - guard it.
        strerror = e.strerror or ""
        if "Connect call failed" in strerror or "Name or service not known" in strerror:
            log.error("Connection to API server failed. Check if the IP address and the " \
                "port are correct and if the firewall port is open.")
            self._error = E_API_SERVER_CONN_FAILED
    except websockets.exceptions.ConnectionClosed:
        log.error("The API server has closed the connection.")
        self._error = E_API_SERVER_CLOSED_CONNECTION
def __init__(self):
    """Report a missing 'docker' group membership and terminate."""
    log.error("The current user is not a member of the 'docker' group. If you recently " \
        "added yourself to the 'docker' group, try to logout and back in again.")
    sys.exit(E_NOT_DOCKER_GROUP_MEMBER)
def __init__(self, path):
    """Report a missing auto build config file and terminate.

    :param string path: the path that could not be found
    """
    log.error("The auto build config file '%s' could not be found.", path)
    sys.exit(E_AUTOBUILDCONFIG_NOT_FOUND)
def __init__(self, path, error):
    """Report a syntactically invalid auto build config and terminate.

    :param string path: path of the invalid config file
    :param error: the underlying parse/validation error
    """
    log.error("The auto build configuration file '%s' is invalid. Error: %s",
              path, error)
    sys.exit(E_AUTOBUILDCONFIG_SYNTAX_ERROR)
def __init__(self):
    """Report a missing 'daps_autobuild_config' property and terminate."""
    log.error("The property 'daps_autobuild_config' is not configured in the configuration " \
        "file.")
    sys.exit(E_CONFIG_AUTOBUILD_ERROR)
def __init__(self, file_name):
    """Report a not-yet-generated config file and terminate.

    :param string file_name: expected path of the missing config file
    """
    log.error("Config file '%s' does not exist. Please generate it by using: 'dapsenv " \
        "config --generate --path %s'", file_name, file_name)
    sys.exit(E_CONFIG_FILE_NOT_CREATED)
def __init__(self, action):
    """Report an unimplemented CLI action and terminate.

    :param string action: name of the action without an implementation
    """
    log.error("No implementation for '%s' found.", action)
    sys.exit(E_NO_IMPLEMENTATION_FOUND)
def __init__(self, message=""):
    """Optionally log *message*, then exit with the CLI-usage error code.

    :param string message: error text to log; nothing is logged when empty
    """
    if message:
        log.error(message)

    sys.exit(E_INVALID_CLI)
def __str__(self):
    """Log the exception message and return it.

    BUG FIX: the previous implementation only logged and implicitly
    returned None; __str__ must return a str, otherwise str(exc)
    raises "TypeError: __str__ returned non-string".
    """
    log.error(self.message)
    return self.message
def __str__(self):
    """Log the failed command and return a describing message.

    BUG FIX: the previous implementation only logged and implicitly
    returned None; __str__ must return a str, otherwise str(exc)
    raises "TypeError: __str__ returned non-string".
    """
    log.error("Could not execute '%s': %s", self.command, self.stderr)
    return "Could not execute '{}': {}".format(self.command, self.stderr)
def __str__(self):
    """Log the unexpected stderr output and return a describing message.

    BUG FIX: the previous implementation only logged and implicitly
    returned None; __str__ must return a str, otherwise str(exc)
    raises "TypeError: __str__ returned non-string".
    """
    log.error("Unexpected stderr for command '%s' caught: %s",
              self.command, self.stderr)
    return "Unexpected stderr for command '{}' caught: {}".format(
        self.command, self.stderr)