Example #1
 def get_source_git(self, src, refspec=None):
     assert isinstance(src, minion.parser.GitSource)
     refspec = refspec or src.branch or 'master'
     logger.debug('getting %s with refspec %s' % (src, refspec))
     repo = os.path.join(self.GITREPOS, src.name.normal)
     if os.path.exists(repo) and os.path.isdir(repo):
         logger.debug('repository exists; erasing it before cloning')
         shutil.rmtree(repo)
     try:
         env = {'GIT_DIR': self.GITCACHE}
         if 'SSH_AUTH_SOCK' in os.environ:
             env['SSH_AUTH_SOCK'] = os.environ['SSH_AUTH_SOCK']
         if 'SSH_AGENT_PID' in os.environ:
             env['SSH_AGENT_PID'] = os.environ['SSH_AGENT_PID']
         run_no_fail(('git', 'remote', 'rm', src.name.normal), env=env)
         run('git-remote-add', ('git', 'remote', 'add', src.name.normal, src.url), env=env)
         run('git-fetch', ('git', 'fetch', src.name.normal), env=env)
         run('git-clone', ('git', 'clone', '--mirror', '--shared', '--reference',
                           self.GITCACHE, src.url, repo), env=env)
         env = {'GIT_DIR': repo}
         rev = run('git-rev-list', ('git', 'rev-list', '-n', '1', refspec), env=env)
         rev = rev.decode('utf8')
         return rev.strip() + ' ' + refspec
     except OSError as e:
         if e.errno == errno.ENOENT:
             raise MinionException('git binary not found (is it installed?)')
         raise e
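The interesting step is resolving a refspec to a single commit without a working tree. A minimal standalone sketch of that step, reusing the GIT_DIR convention from above; resolve_refspec is an illustrative helper, not part of minion:

 import subprocess

 def resolve_refspec(repo, refspec='master'):
     # Ask git for the newest commit reachable from the refspec; GIT_DIR
     # points at the bare/mirror repository, so no checkout is needed.
     out = subprocess.check_output(('git', 'rev-list', '-n', '1', refspec),
                                   env={'GIT_DIR': repo})
     return out.decode('utf8').strip()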
Example #2
 def run(self):
     ident = self.minion_daemon.add_thread()
     logger.meta.prefix = '[%s] ' % ident
     info = None
     try:
         creds = self.sock.getsockopt(socket.SOL_SOCKET, socket.SO_PEERCRED, struct.calcsize('3i'))
         info = 'pid=%d, uid=%d, gid=%d' % struct.unpack('3i', creds)
         logger.info('accepting connection from %s' % (info,))
         buf = b''
         while True:
             data = self.sock.recv(4096)
             if not data:
                 break
             buf += data
             if b'\n' not in buf:
                 continue
             cmds = buf.split(b'\n')
             buf = cmds[-1]
             cmds = cmds[:-1]
             for cmd in cmds:
                 cmd = cmd.decode('ascii')
                 logger.debug('received %r' % (cmd,))
                 output = self.minion_daemon.dispatch(self.sock, cmd)
                 self.sock.sendall(json.dumps(output).encode('utf8'))
         self.sock.shutdown(socket.SHUT_RDWR)
         self.sock.close()
     except BrokenPipeError:
         pass
     except Exception:
         logger.exception('error processing request')
     finally:
         self.minion_daemon.rm_thread(ident)
         logger.info('finished')
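The loop above speaks a simple request/response protocol: newline-terminated ASCII commands in, one UTF-8 JSON document out per command. A client sketch under those assumptions; the helper name and socket path argument are illustrative, not taken from the code:

 import json
 import socket

 def send_command(socket_path, cmd):
     # Send one newline-terminated command and read the JSON reply.
     # Shutting down the write side tells the server we are done, so it
     # closes the connection and reading until EOF yields one response.
     sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
     try:
         sock.connect(socket_path)
         sock.sendall(cmd.encode('ascii') + b'\n')
         sock.shutdown(socket.SHUT_WR)
         buf = b''
         while True:
             data = sock.recv(4096)
             if not data:
                 break
             buf += data
         return json.loads(buf.decode('utf8'))
     finally:
         sock.close()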
Example #3
 def create_socket(self):
     logger.info("creating socket at %r" % self.SOCKET)
     if os.path.exists(self.SOCKET):
         logger.debug('socket already exists; erasing it')
         os.unlink(self.SOCKET)
     sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
     sock.bind(self.SOCKET)
     sock.listen(8)
     return sock
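The socket returned here still needs an accept loop on top of it. One plausible shape for that loop, handing each connection to a handler such as the run method in Example #2; the function and parameter names are illustrative:

 import threading

 def serve_forever(sock, handler):
     # Accept connections on the listening UNIX socket and process each
     # one on its own thread; the handler owns the connection and is
     # responsible for shutting it down when finished.
     while True:
         conn, _ = sock.accept()
         t = threading.Thread(target=handler, args=(conn,))
         t.daemon = True
         t.start()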
Example #4
 def add_thread(self):
     while True:
         ident = str(binascii.hexlify(os.urandom(4)), 'ascii')
         with self._threads_mtx:
             if ident in self._threads:
                 logger.debug('generated duplicate identifier %s' % ident)
                 continue
             self._threads.add(ident)
             logger.debug('generated identifier %s' % ident)
         return ident
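The scheme is four random bytes rendered as eight hex characters, retried under the lock on the unlikely collision. A self-contained version of the same acquire/release pattern, pairing this method with rm_thread from Example #10; the class name is invented for the sketch:

 import binascii
 import os
 import threading

 class IdentRegistry:
     def __init__(self):
         self._mtx = threading.Lock()
         self._idents = set()

     def acquire(self):
         # Draw 4 random bytes, render them as 8 hex characters, and
         # retry if the identifier is already in use.
         while True:
             ident = str(binascii.hexlify(os.urandom(4)), 'ascii')
             with self._mtx:
                 if ident in self._idents:
                     continue
                 self._idents.add(ident)
             return ident

     def release(self, ident):
         # Forget the identifier; releasing twice is harmless.
         with self._mtx:
             self._idents.discard(ident)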
Example #5
 def parsed_sources(self, sources, path=None):
     parsed = [src for src in self.parse(path) if issource(src)]
     sources = [SourceIdentifier(s) for s in sources]
     if not sources:
         logger.debug('no sources provided, using all sources')
         sources = parsed
     else:
         by_name = dict([(s.name, s) for s in parsed])
         for idx, src in enumerate(sources):
             if src not in by_name:
                 raise MinionException("unknown source %r" % src.normal)
             sources[idx] = by_name[src]
     return parsed, sources
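The else branch relies on SourceIdentifier hashing and comparing by name, so a requested identifier can be looked up directly against the parsed sources. A reduced illustration of the same select-by-name behavior using plain stand-ins (the real classes in minion.parser carry more structure):

 import collections

 # Stand-in for a parsed source; only the name matters for selection.
 Source = collections.namedtuple('Source', ('name', 'url'))

 def select_sources(parsed, requested):
     # An empty request means "all parsed sources"; otherwise every
     # requested name must resolve to a parsed source.
     if not requested:
         return list(parsed)
     by_name = {s.name: s for s in parsed}
     try:
         return [by_name[name] for name in requested]
     except KeyError as e:
         raise ValueError('unknown source %r' % e.args[0])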
Example #6
 def run(self):
     try:
         logger.meta.prefix = '[build %s/%s] ' % (self.controller.name, self.proc.name)
         if not self.controller.wait_for(self.proc):
             logger.info('aborting because of prior failure')
             return
         sources, artifacts = self.controller.get_inputs()
         stub = self.stub(sources, artifacts, '-')
         if self.controller.is_cached(stub):
             logger.debug('finishing with released copy')
             self.controller.finish_cached(self.proc, stub, released=True)
             return
         image = self.build_image(self.proc.path)
         logger.debug('docker image is %r' % image)
         stub = self.stub(sources, artifacts, image)
         if self.controller.is_cached(stub):
             success, _, _ = self.controller.get_cached(self.proc, stub)
             if success or not self.controller.retry_failures:
                 logger.debug('finishing with cached copy')
                 self.controller.finish_cached(self.proc, stub)
                 return
         success, log, artifacts = self.run_image(sources, artifacts, stub, image)
         if success:
             self.controller.finish_success(self.proc, stub, log, artifacts)
         else:
             self.controller.finish_error(self.proc, stub, log)
     except Exception as e:
         logger.exception('docker worker failed')
     finally:
         self.controller.abort_if_not_finished(self.proc)
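The cache is consulted twice: first with a '-' placeholder before any docker image exists, so an already-released result can short-circuit the build entirely, and again with the real image id. How stub() derives its key is not shown here; one plausible sketch, assuming the inputs are already plain JSON-serializable data:

 import hashlib
 import json

 def make_stub(sources, artifacts, image):
     # Key the cache on a canonical encoding of everything that should
     # determine the result; the real stub() may differ in detail.
     blob = json.dumps({'sources': sources,
                        'artifacts': artifacts,
                        'image': image}, sort_keys=True)
     return hashlib.sha256(blob.encode('utf8')).hexdigest()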
Example #7
 def dispatch_update_heads(self, sock, cmd):
     parser = minion.cmd.update_heads(MinionThrowingArgumentParser())
     args = parser.parse_args(cmd)
     with self._heads_mtx:
         parsed, sources = self.parsed_sources(args.sources)
         ptrs = {}
         for src in sources:
             ptr = None
             if isinstance(src, minion.parser.FetchSource):
                 ptr = self.get_source_fetch(src)
             elif isinstance(src, minion.parser.GitSource):
                 ptr = self.get_source_git(src)
             else:
                 assert False
             assert ptr is not None
             logger.debug('head of %s is %s' % (src.name, ptr))
             ptrs[src.name.normal] = ptr
         if os.path.exists(self.HEADS):
             logger.debug('HEADS file exists; parsing it')
             old_ptrs = self.sources_load(self.HEADS)
         else:
             logger.debug('HEADS file does not exist; will create it')
             old_ptrs = {}
         new_ptrs = old_ptrs.copy()
         new_ptrs.update(ptrs)
         parsed_names = [p.name.normal for p in parsed]
         have = set(new_ptrs.keys())
         want = set(parsed_names)
         if have != want:
             missing = sorted(want - have, key=parsed_names.index)
             logger.warning('missing head for %s' % (', '.join(missing),))
         self.sources_save(self.HEADS, new_ptrs, parsed_names)
     return {'status': 'success'}
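The set arithmetic near the end amounts to overlaying the freshly fetched heads on the previously saved ones and warning about any parsed source that still lacks a head, in minionfile order. The same logic condensed into a standalone helper for clarity:

 def merge_heads(old_ptrs, new_ptrs, parsed_names):
     # Overlay new pointers on the old ones and report parsed sources
     # that still have no head, preserving minionfile order.
     merged = dict(old_ptrs)
     merged.update(new_ptrs)
     missing = [name for name in parsed_names if name not in merged]
     return merged, missing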
Example #8
 def get_source_fetch(self, src):
     assert isinstance(src, minion.parser.FetchSource)
     logger.debug('getting %s' % (src,))
     if src.sha256 and self.blobs.has(src.sha256):
         logger.debug('%s already in cache' % (src.name,))
         return src.sha256 + ' ' + str(src.name)
     tmpdir = None
     try:
         tmpdir = tempfile.mkdtemp(prefix='minion-')
         path = os.path.join(tmpdir, 'fetched')
         try:
             logger.debug('saving %s to %s' % (src.url, path))
             urllib.request.urlretrieve(src.url, path)
         except urllib.error.HTTPError as e:
             raise MinionException('could not retrieve %s: %s' % (src.name, e))
         sha256 = self.blobs.add(path)
         logger.debug('sha256(%s) = %s' % (src.name, sha256))
         if src.sha256 is not None and sha256 != src.sha256:
             raise MinionException('checksum mismatch on source %s' % src.name)
         return sha256 + ' ' + str(src.name)
     finally:
         if tmpdir is not None and os.path.exists(tmpdir):
             shutil.rmtree(tmpdir)
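The blob store's interface is not shown here, but the checksum comparison only needs a streaming SHA-256 of the downloaded file, which is presumably the digest blobs.add() hands back. A sketch of that step:

 import hashlib

 def sha256_of_file(path, chunk=65536):
     # Hash the file in fixed-size chunks so large downloads do not have
     # to fit in memory; returns the lowercase hex digest.
     h = hashlib.sha256()
     with open(path, 'rb') as f:
         for block in iter(lambda: f.read(chunk), b''):
             h.update(block)
     return h.hexdigest()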
Example #9
 def dispatch_build(self, sock, cmd):
     parser = minion.cmd.build(MinionThrowingArgumentParser())
     args = parser.parse_args(cmd)
     chosen_procs = None
     if args.processes:
         args.processes = tuple(args.processes.split(','))
         chosen_procs = self.parse_subset(args.processes)
     report_name = args.name or datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S')
     report_name = args.target + ':' + report_name
     logger.info('running build process for %s; results will be saved to %s' % (args.target, report_name))
     path = self.TARGET(args.target)
     if not os.path.exists(path):
         raise MinionException("target %r doesn't exist" % args.target)
     with self._heads_mtx:
         mblob = self.blobs.add(self.MINIONFILE)
         sblob = self.blobs.add(os.path.join(path, 'HEADS'))
     minionfile = self.blobs.path(mblob)
     logger.debug('using minionfile %s' % minionfile)
     sources = self.blobs.path(sblob)
     logger.debug('using sources %s' % sources)
     jc = JobController(self, self.sources_load(sources), report_name)
     jc.retry_failures = args.retry_failures
     for proc in self.parse(minionfile):
         if not isprocess(proc):
             continue
         if not args.processes or proc.name in chosen_procs:
             logger.debug('adding %s' % (proc,))
             jc.add(proc)
     output = {}
     output['name'] = report_name
     output['minionfile'] = mblob
     output['sources'] = sblob
     output['reports'] = []
     with self._builds_mtx:
         path = os.path.join(self.BUILDS, report_name)
         if os.path.exists(path) or report_name in self._builds_set:
             raise MinionException('build %r already exists' % report_name)
         self._builds_set.add(report_name)
         self._builds_queue.put((output, jc))
     return {'status': 'success'}
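Report names follow a fixed convention: an explicit name if one was given, otherwise a UTC timestamp, always prefixed with the target. A small illustration of just that naming step; the helper name is invented for the example:

 import datetime

 def default_report_name(target, name=None):
     # e.g. default_report_name('release') -> 'release:2015-06-01T12:00:00'
     name = name or datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S')
     return target + ':' + name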
Example #10
 def rm_thread(self, ident):
     with self._threads_mtx:
         if ident in self._threads:
             self._threads.remove(ident)
             logger.debug('retired identifier %s' % ident)
Example #11
 def forget(self, inputs_id):
     x = self.lookup(inputs_id)
     os.unlink(self._path(inputs_id))
     logger.debug('removed process mapping %s... -> %s...' % (inputs_id[:8], x[:8]))
Example #12
 def run_image(self, sources, artifacts, stub, image):
     tmp = None
     try:
         tmp = tempfile.mkdtemp(prefix='minion-sources-', dir=self.controller.minion.workdir)
         logger.debug('running out of directory %r' % tmp)
         env = {}
         for d in self.proc.dependencies:
             if isinstance(d, SourceIdentifier):
                 ptr = sources[d.normal].split(' ', 1)[0]
                 if len(ptr) == 40:
                     clone_repo(d, self.controller.minion.GITREPOS, tmp, ptr)
                     env[d.var] = os.path.join('/deps', d.normal)
                 elif len(ptr) == 64:
                     self.controller.minion.blobs.copy(ptr, os.path.join(tmp, str(d)))
                     env[d.var] = os.path.join('/deps', str(d))
                 else:
                     assert False
             if isinstance(d, ArtifactIdentifier):
                 sha256, name = artifacts[d]
                 intermediate = ''
                 if d in self.proc.full:
                     intermediate = d.process
                 dirn = os.path.join(tmp, intermediate)
                 if not os.path.exists(dirn):
                     os.makedirs(dirn)
                 self.controller.minion.blobs.copy(sha256, os.path.join(dirn, name))
                 env[d.var] = os.path.join('/deps', intermediate, name)
         name = sha256sum(stub + utcnow().encode('utf8'))
         env = [('-e', k + '=' + v) for k, v in sorted(env.items())]
         p = subprocess.Popen(('docker', 'run', '--privileged') + sum(env, ()) +
                              ('--name', name, '-v', os.path.abspath(tmp) + ":/deps", image),
                              cwd=self.controller.minion.BUILD,
                              stdin=subprocess.PIPE,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT)
         stdout, stderr = p.communicate()
         log = stdout.decode('utf8', 'ignore')
         if p.returncode != 0:
             return False, ("docker run of %s failed:\n" % self.proc.name) + log, {}
         new_artifacts = {}
         for k, v in re.findall('^(MINION_ARTIFACT_.*?)=(.*)$', log, re.MULTILINE):
             p = subprocess.Popen(('docker', 'cp', name + ':' + v, tmp),
                                  stdin=subprocess.PIPE,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
             stdout, stderr = p.communicate()
             if p.returncode != 0:
                 return False, ("docker cp of %s failed:\n" % self.proc.name) + stdout.decode('utf8', 'ignore'), {}
             out = os.path.join(tmp, os.path.basename(v))
             new_artifacts[k] = (self.controller.minion.blobs.add(out), os.path.basename(v))
         new_artifacts_by_id = {}
         for a in self.proc.artifacts:
             if a.var not in new_artifacts:
                 return False, "process %s failed to produce artifact %s\n" % (self.proc.name, a) + log, {}
             new_artifacts_by_id[a] = new_artifacts[a.var]
         p = subprocess.Popen(('docker', 'rm', name),
                              stdin=subprocess.PIPE,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT)
         p.communicate()
         # ignore failures here; they just leave stopped containers lying around
         return True, log, new_artifacts_by_id
     except OSError as e:
         if e.errno == errno.ENOENT:
             raise MinionException("could not run docker (is it installed?)")
         raise e
     finally:
         if tmp is not None:
             shutil.rmtree(tmp)
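Artifacts leave the container by convention: the build prints lines of the form MINION_ARTIFACT_<VAR>=<path>, and run_image scrapes them out of the combined stdout/stderr log before copying each path back with docker cp. A minimal demonstration of the scraping step on a made-up log:

 import re

 log = 'compiling...\nMINION_ARTIFACT_TARBALL=/out/pkg.tar.gz\ndone\n'
 found = dict(re.findall('^(MINION_ARTIFACT_.*?)=(.*)$', log, re.MULTILINE))
 assert found == {'MINION_ARTIFACT_TARBALL': '/out/pkg.tar.gz'}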