def databases(mbsimenv_type):
    """Return the Django DATABASES setting for the given mbsimenv run type.

    Everything except the "buildsystem" type uses a local sqlite3 file in
    BASE_DIR; the "buildsystem" type connects to the central postgres service
    running in the "database" container.
    """
    if mbsimenv_type != "buildsystem":
        # local runs: file based sqlite database; a generous lock timeout
        # avoids "database is locked" errors on concurrent access
        DATABASES = {
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
                'OPTIONS': {
                    'timeout': 60,
                },
            }
        }
    else:
        # buildsystem runs: central postgres server.
        # Fetch the secrets dict once and use dict.get() instead of querying
        # getSecrets() twice ("key in d ... else ''" pattern).
        secrets = mbsimenvSecrets.getSecrets()
        DATABASES = {
            'default': {
                'ENGINE': 'django.db.backends.postgresql',
                'NAME': 'mbsimenv-service-database',
                'USER': '******',
                'PASSWORD': secrets.get("postgresPassword", ""),
                'HOST': 'database',
                'PORT': '5432',
                # keep connections open for 30s to avoid a reconnect per request
                'CONN_MAX_AGE': 30,
            }
        }
    return DATABASES
def setGithubStatus(run, state):
    """Set the github commit status of all mbsim-env repos for this build run.

    state must be "pending", "failure" or "success"; raises RuntimeError for
    any other value. Does nothing for non build-system runs and for
    "-nonedefbranches" build types.
    """
    # skip for none build system runs
    if not args.buildSystemRun:
        return
    # skip for -nonedefbranches buildTypes
    if run.buildType.find("-nonedefbranches") >= 0:
        return
    import github
    if state == "pending":
        description = "Build started at %s" % (run.startTime.isoformat() + "Z")
    elif state == "failure":
        description = "Build failed after %.1f min" % ((run.endTime - run.startTime).total_seconds() / 60)
    elif state == "success":
        description = "Build passed after %.1f min" % ((run.endTime - run.startTime).total_seconds() / 60)
    else:
        raise RuntimeError("Unknown state " + state + " provided")
    try:
        gh = github.Github(mbsimenvSecrets.getSecrets()["githubStatusAccessToken"])
        for repo in ["fmatvec", "hdf5serie", "openmbv", "mbsim"]:
            # only the production system ("latest" tag) is allowed to touch github
            if os.environ["MBSIMENVTAGNAME"] == "latest":
                commit = gh.get_repo("mbsim-env/" + repo).get_commit(getattr(run, repo + "UpdateCommitID"))
                commit.create_status(state,
                                     "https://" + os.environ['MBSIMENVSERVERNAME'] +
                                     django.urls.reverse("builds:run", args=[run.id]),
                                     description,
                                     "builds/%s/%s/%s/%s/%s" % (run.buildType, run.fmatvecBranch,
                                                                run.hdf5serieBranch, run.openmbvBranch,
                                                                run.mbsimBranch))
            else:
                print("Skipping setting github status, this is the staging system!")
    # BUG FIX: was "except ex:" which references the undefined name "ex" and
    # hence raised NameError instead of running this handler.
    except Exception as ex:
        if django.conf.settings.DEBUG:
            raise ex
        else:
            # never print the original exception: it may contain the access token
            raise RuntimeError("Original exception avoided in setGithubStatus to ensure that no secret is printed.")
break except django.db.utils.OperationalError: print("Waiting for database to startup. Retry in 0.5s") time.sleep(0.5) # database migrations django.core.management.call_command("migrate", interactive=False, traceback=True, no_color=True) # create superuser (may fail if already exists) try: django.core.management.call_command("createsuperuser", interactive=False, username="******", email="*****@*****.**") except django.core.management.base.CommandError: pass # set superuser password user=django.contrib.auth.models.User.objects.get(username='******') user.set_password(mbsimenvSecrets.getSecrets()["djangoAdminPassword"]) user.save() # set site-name site=django.contrib.sites.models.Site.objects.get(id=1) site.domain=os.environ["MBSIMENVSERVERNAME"] site.name=os.environ["MBSIMENVSERVERNAME"] site.save() # create github app sa, _=allauth.socialaccount.models.SocialApp.objects.get_or_create(provider="github") sa.name="MBSim-Environment Build Service" sa.client_id=mbsimenvSecrets.getSecrets()["githubAppClientID"] sa.secret=mbsimenvSecrets.getSecrets()["githubAppSecret"] sa.save() sa.sites.add(django.contrib.sites.models.Site.objects.get(id=1))
def main():
    """Entry point of the build script.

    Parses the command line, selects the Django settings module, updates the
    source repositories, builds all tools in dependency order, optionally
    creates a distribution and runs the examples, and reports the result as a
    github commit status.

    Returns 0 on full success, 1 if any build part failed, 2 if only the
    examples failed.
    """
    parseArguments()
    args.sourceDir=os.path.abspath(args.sourceDir)
    mbsimenvSecrets.getSecrets()
    # select the Django settings module depending on the run type
    if args.buildSystemRun:
        os.environ["DJANGO_SETTINGS_MODULE"]="mbsimenv.settings_buildsystem"
    else:
        if os.path.isfile("/.dockerenv"):
            os.environ["DJANGO_SETTINGS_MODULE"]="mbsimenv.settings_localdocker"
        else:
            os.environ["DJANGO_SETTINGS_MODULE"]="mbsimenv.settings_local"
    django.setup()
    # for local runs start a local webserver to view the build results
    if django.conf.settings.MBSIMENV_TYPE=="local" or django.conf.settings.MBSIMENV_TYPE=="localdocker":
        s=base.helper.startLocalServer(args.localServerPort, django.conf.settings.MBSIMENV_TYPE=="localdocker")
        print("Build info is avaiable at: http://%s:%d%s"%(s["hostname"], s["port"],
              django.urls.reverse("builds:current_buildtype", args=[args.buildType])))
        print("")
    removeOldBuilds()

    # all tools to be build including the tool dependencies
    global toolDependencies
    toolDependencies={
      # |ToolName                        |WillFail (if WillFail is true no Atom Feed error is reported if this Tool fails somehow)
      'fmatvec': [False, set([ # depends on
        ])],
      'hdf5serie/h5plotserie': [False, set([ # depends on
        'hdf5serie/hdf5serie',
        ])],
      'hdf5serie/hdf5serie': [False, set([ # depends on
        'fmatvec',
        ])],
      'openmbv/mbxmlutils': [False, set([ # depends on
        'fmatvec',
        ])],
      'openmbv/openmbv': [False, set([ # depends on
        'openmbv/openmbvcppinterface',
        'hdf5serie/hdf5serie',
        ])],
      'openmbv/openmbvcppinterface': [False, set([ # depends on
        'hdf5serie/hdf5serie',
        'openmbv/mbxmlutils',
        ])],
      'mbsim/kernel': [False, set([ # depends on
        'fmatvec',
        'openmbv/openmbvcppinterface',
        ])],
      'mbsim/modules/mbsimHydraulics': [False, set([ # depends on
        'mbsim/kernel',
        'mbsim/modules/mbsimControl',
        ])],
      'mbsim/modules/mbsimFlexibleBody': [False, set([ # depends on
        'mbsim/kernel',
        'mbsim/thirdparty/nurbs++',
        ])],
      'mbsim/thirdparty/nurbs++': [False, set([ # depends on
        ])],
      'mbsim/modules/mbsimElectronics': [False, set([ # depends on
        'mbsim/kernel',
        'mbsim/modules/mbsimControl',
        ])],
      'mbsim/modules/mbsimControl': [False, set([ # depends on
        'mbsim/kernel',
        ])],
      'mbsim/modules/mbsimPhysics': [False, set([ # depends on
        'mbsim/kernel',
        ])],
      'mbsim/modules/mbsimInterface': [False, set([ # depends on
        'mbsim/kernel',
        'mbsim/modules/mbsimControl',
        ])],
      'mbsim/mbsimxml': [False, set([ # depends on
        'mbsim/kernel',
        'openmbv/openmbvcppinterface',
        'openmbv/mbxmlutils',
        # dependencies to mbsim modules are only required for correct xmldoc generation
        'mbsim/modules/mbsimHydraulics',
        'mbsim/modules/mbsimFlexibleBody',
        'mbsim/modules/mbsimElectronics',
        'mbsim/modules/mbsimControl',
        'mbsim/modules/mbsimPhysics',
        'mbsim/modules/mbsimInterface',
        ])],
      'mbsim/mbsimgui': [False, set([ # depends on
        'openmbv/openmbv',
        'openmbv/mbxmlutils',
        'mbsim/mbsimxml',
        ])],
      'mbsim/mbsimfmi': [False, set([ # depends on
        'mbsim/kernel',
        'mbsim/mbsimxml',
        'mbsim/modules/mbsimControl',
        ])],
    }
    # extend the dependencies recursively
    addAllDepencencies()

    # set docDir
    global docDir
    docDir=pj(args.prefix, "share", "mbxmlutils", "doc")
    # append path to PKG_CONFIG_PATH to find mbxmlutils and co. by runexmaples.py
    pkgConfigDir=os.path.normpath(pj(docDir, os.pardir, os.pardir, os.pardir, "lib", "pkgconfig"))
    if "PKG_CONFIG_PATH" in os.environ:
        os.environ["PKG_CONFIG_PATH"]=pkgConfigDir+os.pathsep+os.environ["PKG_CONFIG_PATH"]
    else:
        os.environ["PKG_CONFIG_PATH"]=pkgConfigDir

    # enable coverage: add gcc coverage flags to the compile/link environment
    if args.coverage:
        if not "CFLAGS" in os.environ: os.environ["CFLAGS"]=""
        if not "CXXFLAGS" in os.environ: os.environ["CXXFLAGS"]=""
        if not "LDFLAGS" in os.environ: os.environ["LDFLAGS" ]=""
        os.environ["CFLAGS"]=os.environ["CFLAGS"]+" --coverage"
        os.environ["CXXFLAGS"]=os.environ["CXXFLAGS"]+" --coverage"
        os.environ["LDFLAGS" ]=os.environ["LDFLAGS" ]+" --coverage -lgcov"

    # start messsage
    print("Started build process.")
    sys.stdout.flush()

    # create the database record for this build run
    run=builds.models.Run()
    run.buildType=args.buildType
    run.command=" ".join(sys.argv)
    run.startTime=django.utils.timezone.now()
    run.save()

    nrFailed=0
    nrRun=0

    # update all repositories
    if not args.disableUpdate:
        nrRun+=1
        localRet, commitidfull=repoUpdate(run)
        if localRet!=0:
            nrFailed+=1

    # # check if last build was the same as this build
    # if not args.forceBuild and args.buildSystemRun and lastcommitidfull==commitidfull:
    #     print('Skipping this build: the last build was exactly the same.')
    #     sys.stdout.flush()
    #     return 255 # build skipped, same as last build

    # set status on commit
    setGithubStatus(run, "pending")

    # clean prefix dir
    if args.enableCleanPrefix and os.path.isdir(args.prefix if args.prefix is not None else args.prefixAuto):
        shutil.rmtree(args.prefix if args.prefix is not None else args.prefixAuto)
        os.makedirs(args.prefix if args.prefix is not None else args.prefixAuto)

    # a sorted list of all tools te be build (in the correct order according the dependencies)
    orderedBuildTools=list()
    sortBuildTools(set(toolDependencies) if args.buildTools is None else set(args.buildTools), orderedBuildTools)

    # list tools which are not updated and must not be rebuild according dependencies
    for toolName in set(toolDependencies)-set(orderedBuildTools):
        tool=builds.models.Tool()
        tool.run=run
        tool.toolName=toolName
        tool.willFail=toolDependencies[toolName][0]
        tool.save()

    # remove all "*.gcno", "*.gcda" files (start with a clean coverage state)
    if not args.disableMake and not args.disableMakeClean and args.coverage:
        for e in ["fmatvec", "hdf5serie", "openmbv", "mbsim"]:
            for d,_,files in os.walk(pj(args.sourceDir, e+args.binSuffix)):
                for f in files:
                    if os.path.splitext(f)[1]==".gcno": os.remove(pj(d, f))
                    if os.path.splitext(f)[1]==".gcda": os.remove(pj(d, f))

    # build the other tools in order
    nr=1
    for toolName in orderedBuildTools:
        print("Building "+str(nr)+"/"+str(len(orderedBuildTools))+": "+toolName+": ", end="")
        sys.stdout.flush()
        nrFailedLocal, nrRunLocal=build(toolName, run)
        # failures of tools marked WillFail are not counted
        if toolDependencies[toolName][0]==False:
            nrFailed+=nrFailedLocal
            nrRun+=nrRunLocal
        nr+=1
    run.toolsFailed=run.tools.filterFailed().count()
    run.save()

    # write main doc file
    mainDocPage()

    # create distribution
    if args.enableDistribution:
        nrRun=nrRun+1
        print("Create distribution")
        sys.stdout.flush()
        cdRet=createDistribution(run)
        if cdRet!=0:
            nrFailed=nrFailed+1

    run.endTime=django.utils.timezone.now()
    run.save()

    # run examples
    runExamplesErrorCode=0
    if not args.disableRunExamples:
        savedDir=os.getcwd()
        os.chdir(pj(args.sourceDir, "mbsim", "examples"))
        print("Running examples in "+os.getcwd())
        sys.stdout.flush()
        runExamplesErrorCode=runexamples(run)
        os.chdir(savedDir)

    # update status on commitid
    setGithubStatus(run, "success" if nrFailed==0 else "failure")

    if nrFailed>0:
        print("\nERROR: %d of %d build parts failed!!!!!"%(nrFailed, nrRun)); sys.stdout.flush()

    if nrFailed>0:
        return 1 # build failed
    if abs(runExamplesErrorCode)>0:
        return 2 # examples failed
    return 0 # all passed
import os
import logging
import base.helper
import mbsimenvSecrets
import importlib.util
import django.contrib.staticfiles.storage

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# (loaded from the secrets store, never hardcoded here)
SECRET_KEY = mbsimenvSecrets.getSecrets()["djangoSecretKey"]

# if debugging enabled python requests (urllib3) logging
def debug(enabled):
    """Enable urllib3 DEBUG logging to stderr when *enabled* is truthy."""
    if enabled:
        l = logging.getLogger("urllib3")
        l.setLevel(logging.DEBUG)
        l.addHandler(logging.StreamHandler())

# serve for the configured public server name and for local loopback addresses
ALLOWED_HOSTS = [
    os.environ.get("MBSIMENVSERVERNAME", ""),
    'localhost',
    '127.0.0.1',
    '[::1]'
]

# Application definition
# wait for server (try with dummy and real password) env = os.environ.copy() while True: if pg.poll() is not None: print("database failed to start.") sys.exit(pg.returncode) env["PGPASSWORD"] = "******" if subprocess.call([ "/usr/pgsql-13/bin/psql", "-l", "-h", "localhost", "--username=mbsimenvuser" ], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, env=env) == 0: break env["PGPASSWORD"] = mbsimenvSecrets.getSecrets()["postgresPassword"] if subprocess.call([ "/usr/pgsql-13/bin/psql", "-l", "-h", "localhost", "--username=mbsimenvuser" ], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, env=env) == 0: break print("Waiting for database to startup. Retry in 0.5s") time.sleep(0.5) # change password if it is dummy if env["PGPASSWORD"] == "dummy": if subprocess.call(["/usr/pgsql-13/bin/psql", "-h", "localhost", "--username=mbsimenvuser", "mbsimenv-service-database", "-c", "ALTER USER mbsimenvuser WITH PASSWORD '%s';"% \
def webhook(request):
    """Handle a github webhook request.

    Verifies the HMAC-SHA1 payload signature against the configured webhook
    secret, then for "push" events queues CI builds for all configured branch
    combinations (or a staging build for the "build" repo). Returns a
    JsonResponse describing what was queued, or an error response.
    """
    rawdata = request.body
    # header value is "sha1=<hexdigest>"; strip the "sha1=" prefix
    sig = request.headers['X_HUB_SIGNATURE'][5:]
    try:
        # constant-time comparison of the signature to avoid timing attacks
        if not hmac.compare_digest(
                sig,
                hmac.new(mbsimenvSecrets.getSecrets()["githubWebhookSecret"].encode('utf-8'),
                         rawdata, hashlib.sha1).hexdigest()):
            return django.http.HttpResponseForbidden()
    # BUG FIX: was "except ex:" which references the undefined name "ex" and
    # hence raised NameError instead of running this handler.
    except Exception as ex:
        if django.conf.settings.DEBUG:
            raise ex
        else:
            # never print the original exception: it may contain the webhook secret
            raise RuntimeError(
                "Original exception avoided in webhook to ensure that no secret is printed."
            )
    event = request.headers['X-GitHub-Event']
    res = {"event": event}
    if event == "push":
        # get repo, branch and commit from this push
        data = json.loads(rawdata)
        res["repo"] = data['repository']['name']
        if data['ref'][0:11] != "refs/heads/":
            return django.http.HttpResponseBadRequest("Illegal data in 'ref'.")
        res["branch"] = data['ref'][11:]
        res["commitID"] = data["after"]
        if res["repo"] == "fmatvec" or res["repo"] == "hdf5serie" or \
           res["repo"] == "openmbv" or res["repo"] == "mbsim":
            res["addedBranchCombinations"] = []
            # get all branch combinations to build as save in queue
            # we push the master/master/master/master branch combi first
            masterRecTime = django.utils.timezone.now()
            for bc in service.models.CIBranches.objects.filter(
                    **{res["repo"] + "Branch": res["branch"]}):
                branchCombination = {
                    "fmatvecBranch": bc.fmatvecBranch,
                    "hdf5serieBranch": bc.hdf5serieBranch,
                    "openmbvBranch": bc.openmbvBranch,
                    "mbsimBranch": bc.mbsimBranch,
                }
                # the all-master combination keeps the earliest timestamp so it
                # is handled before the other combinations
                if bc.fmatvecBranch == "master" and bc.hdf5serieBranch == "master" and \
                   bc.openmbvBranch == "master" and bc.mbsimBranch == "master":
                    recTime = masterRecTime
                else:
                    recTime = django.utils.timezone.now()
                ciq, _ = service.models.CIQueue.objects.get_or_create(
                    **branchCombination, defaults={"recTime": recTime})
                ciq.recTime = recTime
                ciq.save()
                res["addedBranchCombinations"].append(branchCombination)
        elif res["repo"] == "build":
            if res["branch"] == "staging":
                ciq = service.models.CIQueue()
                ciq.buildCommitID = res["commitID"]
                ciq.recTime = django.utils.timezone.now()
                ciq.save()
                res["addedBuildCommitID"] = res["commitID"]
            else:
                res["skipNoneStagingBranch"] = res["commitID"]
        else:
            return django.http.HttpResponseBadRequest("Unknown repo.")
        return django.http.JsonResponse(res)
    else:
        return django.http.HttpResponseBadRequest("Unhandled webhook event.")