def welcome(update: Update, context):
    """Handle a new-chat-members service message.

    Waits ``config.WELCOME_DELAY`` seconds (giving an external captcha bot
    time to act), then processes every announced member: declared bots are
    flagged, profiles that look bot-like are kicked, everyone else gets a
    welcome message.

    Fix: the original only handled ``new_chat_members[0]``, silently
    ignoring the rest when several users join in one service message.
    """
    logger.info('Received new user event')
    logger.info(f'Waiting {config.WELCOME_DELAY} seconds until user completes captcha...')
    # NOTE: time.sleep blocks the dispatcher thread for the whole delay.
    sleep(config.WELCOME_DELAY)
    for new_member in update.message.new_chat_members:
        _process_new_member(update, context, new_member)


def _process_new_member(update, context, new_member):
    """Welcome or ban a single new chat member (helper for welcome())."""
    membership_info = context.bot.get_chat_member(update.message.chat_id, new_member.id)
    # The user may have been removed (e.g. by the captcha bot) during the delay.
    if membership_info['status'] == 'left':
        logger.info(f'Skipping welcome message, user {new_member.name} is no longer in the chat')
        return
    logger.info(f'send welcome message for {new_member.name}')
    msg = None
    if new_member.is_bot:
        msg = f"{new_member.name} is a *bot*!! " \
              "-> It could be kindly removed 🗑"
    elif utils.is_bot(new_member):
        # Heuristic says this "user" is really a bot: drop the join
        # message and try to kick it.
        context.bot.delete_message(update.message.chat_id, update.message.message_id)
        if context.bot.kick_chat_member(update.message.chat_id, new_member.id):
            msg = (f"*{new_member.username}* has been banned because I "
                   "considered it was a bot. ")
    else:
        msg = f"Welcome {new_member.name}!! " \
              "I am a friendly and polite *bot* 🤖"
    if msg:
        context.bot.send_message(
            chat_id=update.message.chat_id,
            text=msg,
            parse_mode=telegram.ParseMode.MARKDOWN)
async def forward_msg(self, msg: Message, to: int, what: str = 'photo') -> None:
    """Forward *msg* of kind *what*, honoring the blacklist and per-chat
    redis overrides; defaults to the *to* target otherwise."""
    # Blacklisted senders are diverted to the blacklist handler instead.
    if await self.blacklist_checker(msg):
        request = BlackListForwardRequest(
            msg,
            LogStruct(True, 'forward blacklist context %s from %s (id: %d)',
                      what, msg.chat.title, msg.chat.id))
        self.func_blacklist(request)
        return
    # Look up a per-chat override target stored in redis ('other' messages
    # skip the origin-chat lookup but still honor the forward-source one).
    override = None if what == 'other' else await self.redis.get(str(msg.chat.id))
    if override is None and msg.forward_from_chat:
        override = await self.redis.get(str(msg.forward_from_chat.id))
    if override is not None:
        target = getattr(self.configure, override.decode())
    elif is_bot(msg):
        target = self.configure.bot
    else:
        target = to
    self.ForwardThread.put(
        ForwardRequest(
            target, msg,
            LogStruct(True, 'forward %s from %s (id: %d)',
                      what, msg.chat.title, msg.chat.id)))
async def forward_msg(self, msg: Message, to: int, what: str = 'photo') -> None:
    """Forward *msg* of kind *what* unless it is protected or blacklisted;
    per-chat redis overrides win over the default *to* target."""
    # Protected content cannot be forwarded at all.
    if msg.has_protected_content:
        return
    if await self.redis.check_msg_from_blacklist(msg):
        return
    # Resolve a per-chat override ('other' skips the origin-chat lookup
    # but still honors the forward-source one).
    override = None if what == 'other' else await self.redis.get(str(msg.chat.id))
    if override is None and msg.forward_from_chat:
        override = await self.redis.get(str(msg.forward_from_chat.id))
    if override is not None:
        target = getattr(self.configure, override.decode())
    elif is_bot(msg):
        target = self.configure.bot
    else:
        target = to
    self.ForwardThread.put(
        ForwardRequest(
            target, msg,
            LogStruct(True, 'forward %s from %s (id: %d)',
                      what, msg.chat.title, msg.chat.id)))
def test_is_bot_real_sample():
    """A real spam display name seen in the wild must be classified as a bot."""
    spam_first_name = (
        '[VX.QQ同号253239090]电报社群增粉仅1毛,量大价优,可指定群指定筛选条件'
        '及速度,提供明细报表[群发私发][社区运营][成品账号]欢迎项目方交易所洽'
        '谈合作,诚招全球代理 ALL MARKET BEST PRICE FOR WORLDWIDE REAL n ACTIVE'
        ' TELEGRAM GROUP(CHANNEL) HUMAN MEMBERS,QUALITY AND QUANTITY'
        ' GUARANTEED[Telegram:marvelwork/Email:smartelegram at outlook.com]')
    assert utils.is_bot(Mock(first_name=spam_first_name))
def ban_bots(bot, update):
    """Kick a newly joined member whose profile looks like a spam bot.

    Only the first entry of ``new_chat_members`` is inspected; updates that
    announce several joiners at once are only partially handled.
    """
    new_member = update.message.new_chat_members[0]
    if is_bot(new_member):
        # kick_chat_member returns truthy on success; only announce the ban
        # when it actually happened.
        if bot.kick_chat_member(update.message.chat_id, new_member.id):
            # NOTE(review): the f prefixes below interpolate nothing on the
            # last two fragments; harmless but misleading.
            msg = f"*{new_member.username}* has been banned because I think he was a bot. " \
                  f"`Replicants are like any other machine, are either a benefit or a hazard. " \
                  f"If they're a benefit it's not my problem.`"
            bot.send_message(chat_id=update.message.chat_id, text=msg,
                             parse_mode=telegram.ParseMode.MARKDOWN)
def welcome(bot: Bot, update: Update):
    """Greet the newest chat member, or remove it when it looks like a bot."""
    logger.info('Received new user event')
    new_member = update.message.new_chat_members[0]
    logger.info(f'send welcome message for {new_member.name}')
    msg = None
    if new_member.is_bot:
        # Declared bots are only flagged, not removed automatically.
        msg = (f"{new_member.name} is a *bot*!! "
               "-> It could be kindly removed 🗑")
    elif utils.is_bot(new_member):
        # Heuristically detected bot: drop the join message and try to kick.
        bot.delete_message(update.message.chat_id, update.message.message_id)
        if bot.kick_chat_member(update.message.chat_id, new_member.id):
            msg = (f"*{new_member.username}* has been banned because I "
                   "considered it was a bot. ")
    else:
        msg = (f"Welcome {new_member.name}!! "
               "I am a friendly and polite *bot* 🤖")
    if msg:
        bot.send_message(chat_id=update.message.chat_id,
                         text=msg,
                         parse_mode=telegram.ParseMode.MARKDOWN)
async def run(self) -> None:
    """Walk the full history of ``self.target_id``, collect unseen
    photos/videos/documents, and (unless in dirty mode) queue them for
    forwarding to the configured query channels.

    Dirty mode uses ``checkFile_dirty`` (presumably marking files as seen
    without forwarding — TODO confirm against FileChecker).
    """
    checkfunc = self.checker.checkFile if not self.dirty_run else self.checker.checkFile_dirty
    photos, videos, docs = [], [], []
    msg_group = await self.client.get_history(self.target_id, offset_id=self.offset_id)
    await self.client.send_message(
        self.chat_id,
        'Now process query {}, total {} messages{}'.format(
            self.target_id, msg_group.messages[0]['message_id'],
            ' (Dirty mode)' if self.dirty_run else ''))
    status_thread = SetTypingCoroutine(self.client, self.chat_id)
    # Walk backwards from the newest message id down to 1.
    self.offset_id = msg_group.messages[0]['message_id']
    while self.offset_id > 1:
        for x in list(msg_group.messages):
            if x.photo:
                # Skip files already recorded by the checker.
                if not await checkfunc(x.photo.sizes[-1].file_unique_id):
                    continue
                photos.append((is_bot(x), {
                    'chat': {
                        'id': self.target_id
                    },
                    'message_id': x['message_id']
                }))
            elif x.video:
                if not await checkfunc(x.video.file_unique_id):
                    continue
                videos.append((is_bot(x), {
                    'chat': {
                        'id': self.target_id
                    },
                    'message_id': x['message_id']
                }))
            elif x.document:
                # Only image/video documents are deduplicated; every other
                # document is always collected.
                if '/' in x.document.mime_type and x.document.mime_type.split('/')[0] in ('image', 'video') and \
                        not await checkfunc(x.document.file_unique_id):
                    continue
                docs.append((is_bot(x), {
                    'chat': {
                        'id': self.target_id
                    },
                    'message_id': x['message_id']
                }))
        try:
            # Continue below the oldest message of this batch.
            self.offset_id = msg_group.messages[-1]['message_id'] - 1
        except IndexError:
            # Empty batch: the channel history is exhausted.
            logger.info('Query channel end by message_id %d', self.offset_id + 1)
            break
        try:
            msg_group = await self.client.get_history(
                self.target_id, offset_id=self.offset_id)
        except pyrogram.errors.FloodWait as e:
            # NOTE(review): after sleeping, the loop re-processes the stale
            # msg_group instead of retrying get_history — likely duplicates
            # entries; verify intended behavior.
            logger.warning('Got flood wait, sleep %d seconds', e.x)
            await asyncio.sleep(e.x)
    if not self.dirty_run:
        await self.client.send_message(self.configure.query_photo, f'Begin {self.target_id} forward')
        await self.client.send_message(self.configure.query_video, f'Begin {self.target_id} forward')
        await self.client.send_message(self.configure.query_doc, f'Begin {self.target_id} forward')
        # Forward oldest-first; bot-originated items go to bot_for instead.
        # NOTE(review): reconstructed indentation — assumed these loops run
        # only when not in dirty mode; confirm against original file.
        for x in reversed(photos):
            ForwardThread.put(
                ForwardRequest(
                    self.configure.query_photo if not x[0] else
                    self.configure.bot_for, x[1]))
        for x in reversed(videos):
            ForwardThread.put(
                ForwardRequest(
                    self.configure.query_video if not x[0] else
                    self.configure.bot_for, x[1]))
        for x in reversed(docs):
            ForwardThread.put(
                ForwardRequest(
                    self.configure.query_doc if not x[0] else
                    self.configure.bot_for, x[1]))
    status_thread.set_off()
    await self.client.send_message(
        self.chat_id,
        'Query completed {} photos,' ' {} videos, {} docs{}'.format(
            len(photos), len(videos), len(docs),
            ' (Dirty mode)' if self.dirty_run else ''))
    logger.info(
        'Query %d completed%s, total %d photos, %d videos, %d documents.',
        self.target_id, ' (Dirty run)' if self.dirty_run else '',
        len(photos), len(videos), len(docs))
    del photos, videos, docs
def Main(argv):
    """Build desugar_jdk_libs and archive the jar/maven zip to cloud storage.

    Supports --build-only (local artifacts, no upload) and --dry-run
    (no upload, optional copy to --dry-run-output). Uploading for real is
    restricted to bot environments.
    """
    (options, args) = ParseOptions(argv)
    if (len(args) > 0):
        raise Exception('Unsupported arguments')
    # Guard against accidental uploads from developer machines.
    if not utils.is_bot() and not (options.dry_run or options.build_only):
        raise Exception('You are not a bot, don\'t archive builds. ' +
                        'Use --dry-run or --build-only to test locally')
    if options.dry_run_output:
        MustBeExistingDirectory(options.dry_run_output)
    if options.build_only:
        MustBeExistingDirectory(options.build_only)
    if utils.is_bot():
        archive.SetRLimitToMax()
    # Make sure bazel is extracted in third_party.
    utils.DownloadFromGoogleCloudStorage(utils.BAZEL_SHA_FILE)
    utils.DownloadFromGoogleCloudStorage(utils.JAVA8_SHA_FILE)
    utils.DownloadFromGoogleCloudStorage(utils.JAVA11_SHA_FILE)
    if options.build_only:
        # Local build: copy the artifacts next to the requested directory
        # and stop before any upload logic.
        with utils.TempDir() as checkout_dir:
            CloneDesugaredLibrary(options.github_account, checkout_dir)
            (library_jar, maven_zip) = BuildDesugaredLibrary(checkout_dir, "jdk8")
            shutil.copyfile(
                library_jar,
                os.path.join(options.build_only, os.path.basename(library_jar)))
            shutil.copyfile(
                maven_zip,
                os.path.join(options.build_only, os.path.basename(maven_zip)))
            return
    # Only handling versioned desugar_jdk_libs.
    is_master = False
    with utils.TempDir() as checkout_dir:
        CloneDesugaredLibrary(options.github_account, checkout_dir)
        version = GetVersion(os.path.join(checkout_dir, VERSION_FILE))
        destination = archive.GetVersionDestination(
            'gs://', LIBRARY_NAME + '/' + version, is_master)
        # Refuse to overwrite an already-published version.
        if utils.cloud_storage_exists(destination) and not options.dry_run:
            raise Exception('Target archive directory %s already exists' % destination)
        (library_jar, maven_zip) = BuildDesugaredLibrary(checkout_dir, "jdk8")
        storage_path = LIBRARY_NAME + '/' + version
        # Upload the jar file with the library.
        destination = archive.GetUploadDestination(storage_path,
                                                   LIBRARY_NAME + '.jar', is_master)
        Upload(options, library_jar, storage_path, destination, is_master)
        # Upload the maven zip file with the library.
        destination = archive.GetUploadDestination(storage_path,
                                                   LIBRARY_NAME + '.zip', is_master)
        Upload(options, maven_zip, storage_path, destination, is_master)
        # Upload the jar file for accessing GCS as a maven repro.
        maven_destination = archive.GetUploadDestination(
            utils.get_maven_path('desugar_jdk_libs', version),
            'desugar_jdk_libs-%s.jar' % version, is_master)
        if options.dry_run:
            print('Dry run, not actually creating maven repo')
        else:
            utils.upload_file_to_cloud_storage(library_jar, maven_destination)
            print('Maven repo root available at: %s' % archive.GetMavenUrl(is_master))
def Main():
    """Build and archive all R8 release artifacts to cloud storage.

    Creates maven zips, builds jars with/without dependencies, stamps each
    jar with a version properties file, and uploads everything (plus maven
    repos for r8 and the desugar configuration). --dry-run skips uploads and
    can copy artifacts to --dry-run-output instead.
    """
    (options, args) = ParseOptions()
    # Guard against accidental uploads from developer machines.
    if not utils.is_bot() and not options.dry_run:
        raise Exception('You are not a bot, don\'t archive builds. ' +
                        'Use --dry-run to test locally')
    if (options.dry_run_output and
        (not os.path.exists(options.dry_run_output) or
         not os.path.isdir(options.dry_run_output))):
        raise Exception(options.dry_run_output +
                        ' does not exist or is not a directory')
    if utils.is_bot() and not utils.IsWindows():
        SetRLimitToMax()
    if not utils.IsWindows():
        PrintResourceInfo()
    # Create maven release which uses a build that exclude dependencies.
    create_maven_release.generate_r8_maven_zip(utils.MAVEN_ZIP)
    create_maven_release.generate_r8_maven_zip(utils.MAVEN_ZIP_LIB, is_r8lib=True)
    # Generate and copy a full build without dependencies.
    gradle.RunGradleExcludeDeps([utils.R8, utils.R8_SRC])
    shutil.copyfile(utils.R8_JAR, utils.R8_FULL_EXCLUDE_DEPS_JAR)
    # Ensure all archived artifacts has been built before archiving.
    # The target tasks postfixed by 'lib' depend on the actual target task so
    # building it invokes the original task first.
    # The '-Pno_internal' flag is important because we generate the lib based on uses in tests.
    gradle.RunGradle([
        utils.R8, utils.D8, utils.R8LIB, utils.R8LIB_NO_DEPS, utils.R8RETRACE,
        utils.R8RETRACE_NO_DEPS, utils.LIBRARY_DESUGAR_CONVERSIONS,
        '-Pno_internal'
    ])
    # Create maven release of the desuage_jdk_libs configuration. This require
    # an r8.jar with dependencies to have been built.
    create_maven_release.generate_desugar_configuration_maven_zip(
        utils.DESUGAR_CONFIGURATION_MAVEN_ZIP)
    version = GetVersion()
    is_master = IsMaster(version)
    if is_master:
        # On master we use the git hash to archive with
        print('On master, using git hash for archiving')
        version = GetGitHash()
    destination = GetVersionDestination('gs://', version, is_master)
    # Refuse to overwrite an already-published version.
    if utils.cloud_storage_exists(destination) and not options.dry_run:
        raise Exception('Target archive directory %s already exists' % destination)
    with utils.TempDir() as temp:
        # Create pom file for our maven repository that we build for testing.
        default_pom_file = os.path.join(temp, 'r8.pom')
        create_maven_release.write_default_r8_pom_file(default_pom_file, version)
        version_file = os.path.join(temp, 'r8-version.properties')
        with open(version_file, 'w') as version_writer:
            version_writer.write('version.sha=' + GetGitHash() + '\n')
            # Real uploads run on swarming bots; record which bot released.
            if not os.environ.get('SWARMING_BOT_ID') and not options.dry_run:
                raise Exception('Environment variable SWARMING_BOT_ID not set')
            releaser = \
                ("<local developer build>" if options.dry_run else
                 'releaser=go/r8bot (' + os.environ.get('SWARMING_BOT_ID') + ')\n')
            version_writer.write(releaser)
            version_writer.write('version-file.version.code=1\n')
        for file in [
                utils.D8_JAR, utils.R8_JAR, utils.R8LIB_JAR,
                utils.R8LIB_JAR + '.map', utils.R8_SRC_JAR,
                utils.R8_FULL_EXCLUDE_DEPS_JAR, utils.R8LIB_EXCLUDE_DEPS_JAR,
                utils.R8LIB_EXCLUDE_DEPS_JAR + '.map', utils.R8RETRACE_JAR,
                utils.R8RETRACE_EXCLUDE_DEPS_JAR, utils.MAVEN_ZIP,
                utils.MAVEN_ZIP_LIB, utils.DESUGAR_CONFIGURATION,
                utils.DESUGAR_CONFIGURATION_MAVEN_ZIP, utils.GENERATED_LICENSE,
        ]:
            file_name = os.path.basename(file)
            tagged_jar = os.path.join(temp, file_name)
            shutil.copyfile(file, tagged_jar)
            # Embed the version properties into every (non-source) jar.
            if file_name.endswith(
                    '.jar') and not file_name.endswith('-src.jar'):
                with zipfile.ZipFile(tagged_jar, 'a') as zip:
                    zip.write(version_file, os.path.basename(version_file))
            destination = GetUploadDestination(version, file_name, is_master)
            print('Uploading %s to %s' % (tagged_jar, destination))
            if options.dry_run:
                if options.dry_run_output:
                    dry_run_destination = os.path.join(options.dry_run_output,
                                                       file_name)
                    print('Dry run, not actually uploading. Copying to ' +
                          dry_run_destination)
                    shutil.copyfile(tagged_jar, dry_run_destination)
                else:
                    print('Dry run, not actually uploading')
            else:
                utils.upload_file_to_cloud_storage(tagged_jar, destination)
                print('File available at: %s' %
                      GetUrl(version, file_name, is_master))
            # Upload R8 to a maven compatible location.
            if file == utils.R8_JAR:
                maven_dst = GetUploadDestination(
                    utils.get_maven_path('r8', version),
                    'r8-%s.jar' % version, is_master)
                maven_pom_dst = GetUploadDestination(
                    utils.get_maven_path('r8', version),
                    'r8-%s.pom' % version, is_master)
                if options.dry_run:
                    print('Dry run, not actually creating maven repo for R8')
                else:
                    utils.upload_file_to_cloud_storage(tagged_jar, maven_dst)
                    utils.upload_file_to_cloud_storage(default_pom_file,
                                                       maven_pom_dst)
                    print('Maven repo root available at: %s' %
                          GetMavenUrl(is_master))
            # Upload desugar_jdk_libs configuration to a maven compatible location.
            if file == utils.DESUGAR_CONFIGURATION:
                jar_basename = 'desugar_jdk_libs_configuration.jar'
                jar_version_name = 'desugar_jdk_libs_configuration-%s.jar' % version
                maven_dst = GetUploadDestination(
                    utils.get_maven_path('desugar_jdk_libs_configuration',
                                         version),
                    jar_version_name, is_master)
                with utils.TempDir() as tmp_dir:
                    desugar_jdk_libs_configuration_jar = os.path.join(
                        tmp_dir, jar_version_name)
                    create_maven_release.generate_jar_with_desugar_configuration(
                        utils.DESUGAR_CONFIGURATION,
                        utils.DESUGAR_IMPLEMENTATION,
                        utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP,
                        desugar_jdk_libs_configuration_jar)
                    if options.dry_run:
                        print(
                            'Dry run, not actually creating maven repo for ' +
                            'desugar configuration.')
                        if options.dry_run_output:
                            shutil.copyfile(
                                desugar_jdk_libs_configuration_jar,
                                os.path.join(options.dry_run_output,
                                             jar_version_name))
                    else:
                        utils.upload_file_to_cloud_storage(
                            desugar_jdk_libs_configuration_jar, maven_dst)
                        print('Maven repo root available at: %s' %
                              GetMavenUrl(is_master))
                        # Also archive the jar as non maven destination for Google3
                        jar_destination = GetUploadDestination(
                            version, jar_basename, is_master)
                        utils.upload_file_to_cloud_storage(
                            desugar_jdk_libs_configuration_jar, jar_destination)
def test_is_tgmember_sect_real(tgmember_name):
    """Each parametrized real-world spam name must be classified as a bot."""
    assert utils.is_bot(Mock(first_name=tgmember_name))
def Main(argv):
    """Build desugar_jdk_libs with bazel, inject the compiled conversion
    classes into the library jar, and archive jar + maven zip to cloud
    storage. Uploading for real is restricted to bot environments.
    """
    (options, args) = ParseOptions(argv)
    if (len(args) > 0):
        raise Exception('Unsupported arguments')
    # Guard against accidental uploads from developer machines.
    if not utils.is_bot() and not options.dry_run:
        raise Exception('You are not a bot, don\'t archive builds. ' +
                        'Use --dry-run to test locally')
    if (options.dry_run_output and
        (not os.path.exists(options.dry_run_output) or
         not os.path.isdir(options.dry_run_output))):
        raise Exception(options.dry_run_output +
                        ' does not exist or is not a directory')
    if utils.is_bot():
        archive.SetRLimitToMax()
    # Make sure bazel is extracted in third_party.
    utils.DownloadFromGoogleCloudStorage(utils.BAZEL_SHA_FILE)
    utils.DownloadFromGoogleCloudStorage(utils.JAVA8_SHA_FILE)
    # Only handling versioned desugar_jdk_libs.
    is_master = False
    with utils.TempDir() as checkout_dir:
        git_utils.GitClone(
            'https://github.com/' + options.github_account + '/' + LIBRARY_NAME,
            checkout_dir)
        with utils.ChangedWorkingDirectory(checkout_dir):
            version = GetVersion()
            destination = archive.GetVersionDestination(
                'gs://', LIBRARY_NAME + '/' + version, is_master)
            # Refuse to overwrite an already-published version.
            if utils.cloud_storage_exists(destination) and not options.dry_run:
                raise Exception('Target archive directory %s already exists' %
                                destination)
            bazel = os.path.join(utils.BAZEL_TOOL, 'lib', 'bazel', 'bin', 'bazel')
            cmd = [bazel, 'build', '--host_force_python=PY2', 'maven_release']
            utils.PrintCmd(cmd)
            subprocess.check_call(cmd)
            # Shut the bazel server down so it does not linger after the build.
            cmd = [bazel, 'shutdown']
            utils.PrintCmd(cmd)
            subprocess.check_call(cmd)
            # Compile the stubs for conversion files compilation.
            stub_compiled_folder = os.path.join(checkout_dir, 'stubs')
            os.mkdir(stub_compiled_folder)
            all_stubs = GetFilesInFolder(
                os.path.join(CONVERSION_FOLDER, 'stubs'))
            cmd = [JDK8_JAVAC, '-d', stub_compiled_folder] + all_stubs
            utils.PrintCmd(cmd)
            subprocess.check_call(cmd)
            # Compile the conversion files.
            conversions_compiled_folder = os.path.join(checkout_dir, 'conversions')
            os.mkdir(conversions_compiled_folder)
            all_conversions = GetFilesInFolder(
                os.path.join(CONVERSION_FOLDER, 'conversions'))
            cmd = [
                JDK8_JAVAC, '-cp', stub_compiled_folder, '-d',
                conversions_compiled_folder
            ] + all_conversions
            utils.PrintCmd(cmd)
            subprocess.check_call(cmd)
            # Locate the library jar and the maven zip with the jar from the
            # bazel build.
            library_jar = os.path.join('bazel-bin', 'src', 'share', 'classes',
                                       'java', 'libjava.jar')
            maven_zip = os.path.join('bazel-bin', LIBRARY_NAME + '.zip')
            # Make a writable copy of the jar.
            jar_folder = os.path.join(checkout_dir, 'jar')
            os.mkdir(jar_folder)
            shutil.copy(library_jar, jar_folder)
            library_jar = os.path.join(jar_folder, 'libjava.jar')
            os.chmod(library_jar, 0o777)
            # Add conversion classes into the jar.
            all_compiled_conversions = GetFilesInFolder(
                conversions_compiled_folder)
            with zipfile.ZipFile(library_jar, mode='a', allowZip64=True) as jar:
                for clazz in all_compiled_conversions:
                    jar.write(clazz,
                              arcname=os.path.relpath(
                                  clazz, conversions_compiled_folder),
                              compress_type=zipfile.ZIP_DEFLATED)
            storage_path = LIBRARY_NAME + '/' + version
            # Upload the jar file with the library.
            destination = archive.GetUploadDestination(storage_path,
                                                       LIBRARY_NAME + '.jar',
                                                       is_master)
            Upload(options, library_jar, storage_path, destination, is_master)
            # Upload the maven zip file with the library.
            destination = archive.GetUploadDestination(storage_path,
                                                       LIBRARY_NAME + '.zip',
                                                       is_master)
            Upload(options, maven_zip, storage_path, destination, is_master)
            # Upload the jar file for accessing GCS as a maven repro.
            maven_destination = archive.GetUploadDestination(
                utils.get_maven_path('desugar_jdk_libs', version),
                'desugar_jdk_libs-%s.jar' % version, is_master)
            if options.dry_run:
                print('Dry run, not actually creating maven repo')
            else:
                utils.upload_file_to_cloud_storage(library_jar, maven_destination)
                print('Maven repo root available at: %s' %
                      archive.GetMavenUrl(is_master))
def Main():
    """Assemble Gradle arguments from the parsed options and run the R8
    test suite on the selected runtime(s), archiving results on failure.

    NOTE: this is Python 2 code (bare ``print`` statements below).
    Returns a process exit code (0 on success).
    """
    (options, args) = ParseOptions()
    if utils.is_bot():
        gradle.RunGradle(['--no-daemon', 'clean'])
    gradle_args = ['--stacktrace']
    if utils.is_bot():
        # Bots don't like dangling processes.
        gradle_args.append('--no-daemon')
    # Set all necessary Gradle properties and options first.
    if options.shard_count:
        assert options.shard_number
        gradle_args.append('-Pshard_count=%s' % options.shard_count)
        gradle_args.append('-Pshard_number=%s' % options.shard_number)
    if options.verbose:
        gradle_args.append('-Pprint_test_stdout')
    if options.no_internal:
        gradle_args.append('-Pno_internal')
    if options.only_internal:
        gradle_args.append('-Ponly_internal')
    if options.all_tests:
        gradle_args.append('-Pall_tests')
    if options.slow_tests:
        gradle_args.append('-Pslow_tests=1')
    if options.tool:
        gradle_args.append('-Ptool=%s' % options.tool)
    if options.one_line_per_test:
        gradle_args.append('-Pone_line_per_test')
    if options.jctf:
        gradle_args.append('-Pjctf')
    if options.only_jctf:
        gradle_args.append('-Ponly_jctf')
    if options.test_namespace:
        gradle_args.append('-Ptest_namespace=%s' % options.test_namespace)
    if options.jctf_compile_only:
        gradle_args.append('-Pjctf_compile_only')
    if options.disable_assertions:
        gradle_args.append('-Pdisable_assertions')
    if options.with_code_coverage:
        gradle_args.append('-Pwith_code_coverage')
    if os.name == 'nt':
        # temporary hack
        gradle_args.append('-Pno_internal')
        gradle_args.append('-x')
        gradle_args.append('createJctfTests')
        gradle_args.append('-x')
        gradle_args.append('jctfCommonJar')
        gradle_args.append('-x')
        gradle_args.append('jctfTestsClasses')
    if options.test_dir:
        gradle_args.append('-Ptest_dir=' + options.test_dir)
        if not os.path.exists(options.test_dir):
            os.makedirs(options.test_dir)
    if options.java_home:
        gradle_args.append('-Dorg.gradle.java.home=' + options.java_home)
    if options.java_max_memory_size:
        gradle_args.append('-Ptest_xmx=' + options.java_max_memory_size)
    if options.generate_golden_files_to:
        gradle_args.append('-Pgenerate_golden_files_to=' +
                           options.generate_golden_files_to)
        if not os.path.exists(options.generate_golden_files_to):
            os.makedirs(options.generate_golden_files_to)
        gradle_args.append('-PHEAD_sha1=' + utils.get_HEAD_sha1())
    if options.use_golden_files_in:
        gradle_args.append('-Puse_golden_files_in=' +
                           options.use_golden_files_in)
        if not os.path.exists(options.use_golden_files_in):
            os.makedirs(options.use_golden_files_in)
        gradle_args.append('-PHEAD_sha1=' + utils.get_HEAD_sha1())
    if (not options.no_r8lib) and options.r8lib_no_deps:
        print(
            'Cannot run tests on r8lib with and without deps. R8lib is now default target.'
        )
        exit(1)
    if not options.no_r8lib:
        gradle_args.append('-Pr8lib')
        # Force gradle to build a version of r8lib without dependencies for
        # BootstrapCurrentEqualityTest.
        gradle_args.append('R8LibNoDeps')
    if options.r8lib_no_deps:
        gradle_args.append('-Pr8lib_no_deps')
    if options.worktree:
        gradle_args.append('-g=' +
                           os.path.join(utils.REPO_ROOT, ".gradle_user_home"))
        gradle_args.append('--no-daemon')
    # Build an R8 with dependencies for bootstrapping tests before adding test sources.
    gradle_args.append('r8WithDeps')
    gradle_args.append('r8WithDeps11')
    gradle_args.append('r8WithRelocatedDeps')
    gradle_args.append('r8WithRelocatedDeps11')
    # Add Gradle tasks
    gradle_args.append('cleanTest')
    gradle_args.append('test')
    if options.fail_fast:
        gradle_args.append('--fail-fast')
    if options.failed:
        # Re-run only the tests that failed last time.
        args = compute_failed_tests(args)
        if args is None:
            return 1
        if len(args) == 0:
            print "No failing tests"
            return 0
    # Test filtering. Must always follow the 'test' task.
    for testFilter in args:
        gradle_args.append('--tests')
        gradle_args.append(testFilter)
    if options.with_code_coverage:
        # Create Jacoco report after tests.
        gradle_args.append('jacocoTestReport')
    if options.use_golden_files_in:
        # Fetch the golden files matching the current HEAD.
        sha1 = '%s' % utils.get_HEAD_sha1()
        with utils.ChangedWorkingDirectory(options.use_golden_files_in):
            utils.download_file_from_cloud_storage(
                'gs://r8-test-results/golden-files/%s.tar.gz' % sha1,
                '%s.tar.gz' % sha1)
            utils.unpack_archive('%s.tar.gz' % sha1)
    # Hang detection: dump stacks when no test has finished for a while.
    print_stacks_timeout = options.print_hanging_stacks
    if (utils.is_bot() and not utils.IsWindows()) or print_stacks_timeout > -1:
        timestamp_file = os.path.join(utils.BUILD, 'last_test_time')
        if os.path.exists(timestamp_file):
            os.remove(timestamp_file)
        gradle_args.append('-Pupdate_test_timestamp=' + timestamp_file)
        print_stacks_timeout = (print_stacks_timeout
                                if print_stacks_timeout != -1
                                else TIMEOUT_HANDLER_PERIOD)
        thread.start_new_thread(timeout_handler, (
            timestamp_file,
            print_stacks_timeout,
        ))
    rotate_test_reports()
    if options.only_jctf:
        # Note: not setting -Pruntimes will run with all available runtimes.
        return_code = gradle.RunGradle(gradle_args, throw_on_failure=False)
        return archive_and_return(return_code, options)
    if options.horizontal_class_merging:
        gradle_args.append('-PhorizontalClassMerging')
    # Now run tests on selected runtime(s).
    if options.runtimes:
        if options.dex_vm != 'default':
            print 'Unexpected runtimes and dex_vm argument: ' + options.dex_vm
            sys.exit(1)
        if options.runtimes == 'empty':
            # Set runtimes with no content will configure no runtimes.
            gradle_args.append('-Pruntimes=')
        elif options.runtimes == 'all':
            # An unset runtimes will configure all runtimes
            pass
        else:
            # Expand runtime prefixes (e.g. 'dex') into concrete runtimes.
            prefixes = [
                prefix.strip() for prefix in options.runtimes.split(':')
            ]
            runtimes = []
            for prefix in prefixes:
                matches = [
                    rt for rt in VALID_RUNTIMES if rt.startswith(prefix)
                ]
                if len(matches) == 0:
                    print "Invalid runtime prefix '%s'." % prefix
                    print "Must be just 'all', 'empty'," \
                          " or a prefix of %s" % ', '.join(VALID_RUNTIMES)
                    sys.exit(1)
                runtimes.extend(matches)
            gradle_args.append('-Pruntimes=%s' % ':'.join(runtimes))
        return_code = gradle.RunGradle(gradle_args, throw_on_failure=False)
        return archive_and_return(return_code, options)
    # Legacy testing populates the runtimes based on dex_vm.
    vms_to_test = [options.dex_vm] if options.dex_vm != "all" else ALL_ART_VMS
    for art_vm in vms_to_test:
        vm_suffix = "_" + options.dex_vm_kind if art_vm != "default" else ""
        runtimes = ['dex-' + art_vm]
        # Only append the "none" runtime and JVMs if running on the "default" DEX VM.
        if art_vm == "default":
            # TODO(b/170454076): Remove special casing for bot when rex-script has
            # been migrated to account for runtimes.
            if utils.is_bot():
                runtimes.extend(['jdk11', 'none'])
            else:
                runtimes.extend(['jdk8', 'jdk9', 'jdk11', 'none'])
        return_code = gradle.RunGradle(gradle_args + [
            '-Pdex_vm=%s' % art_vm + vm_suffix,
            '-Pruntimes=%s' % ':'.join(runtimes),
        ], throw_on_failure=False)
        if options.generate_golden_files_to:
            # Publish the freshly generated golden files for this HEAD.
            sha1 = '%s' % utils.get_HEAD_sha1()
            with utils.ChangedWorkingDirectory(
                    options.generate_golden_files_to):
                archive = utils.create_archive(sha1)
                utils.upload_file_to_cloud_storage(
                    archive, 'gs://r8-test-results/golden-files/' + archive)
        if return_code != 0:
            return archive_and_return(return_code, options)
    return 0
def Main(argv):
    """Build desugar_jdk_libs with bazel and archive the jar and maven zip
    to cloud storage. Uploading for real is restricted to bot environments.
    """
    (options, args) = ParseOptions(argv)
    if (len(args) > 0):
        raise Exception('Unsupported arguments')
    # Guard against accidental uploads from developer machines.
    if not utils.is_bot() and not options.dry_run:
        raise Exception('You are not a bot, don\'t archive builds. ' +
                        'Use --dry-run to test locally')
    if (options.dry_run_output and
        (not os.path.exists(options.dry_run_output) or
         not os.path.isdir(options.dry_run_output))):
        raise Exception(options.dry_run_output +
                        ' does not exist or is not a directory')
    if utils.is_bot():
        archive.SetRLimitToMax()
    # Make sure bazel is extracted in third_party.
    utils.DownloadFromGoogleCloudStorage(utils.BAZEL_SHA_FILE)
    # Only handling versioned desugar_jdk_libs.
    is_master = False
    with utils.TempDir() as checkout_dir:
        git_utils.GitClone(
            'https://github.com/' + options.github_account + '/' + LIBRARY_NAME,
            checkout_dir)
        with utils.ChangedWorkingDirectory(checkout_dir):
            version = GetVersion()
            destination = archive.GetVersionDestination(
                'gs://', LIBRARY_NAME + '/' + version, is_master)
            # Refuse to overwrite an already-published version.
            if utils.cloud_storage_exists(destination) and not options.dry_run:
                raise Exception('Target archive directory %s already exists' %
                                destination)
            bazel = os.path.join(utils.BAZEL_TOOL, 'lib', 'bazel', 'bin', 'bazel')
            cmd = [bazel, 'build', 'maven_release']
            utils.PrintCmd(cmd)
            subprocess.check_call(cmd)
            # Shut the bazel server down so it does not linger after the build.
            cmd = [bazel, 'shutdown']
            utils.PrintCmd(cmd)
            subprocess.check_call(cmd)
            # Locate the library jar and the maven zip with the jar from the
            # bazel build.
            library_jar = os.path.join('bazel-bin', 'src', 'share', 'classes',
                                       'java', 'libjava.jar')
            maven_zip = os.path.join('bazel-bin', LIBRARY_NAME + '.zip')
            storage_path = LIBRARY_NAME + '/' + version
            # Upload the jar file with the library.
            destination = archive.GetUploadDestination(storage_path,
                                                       LIBRARY_NAME + '.jar',
                                                       is_master)
            Upload(options, library_jar, storage_path, destination, is_master)
            # Upload the maven zip file with the library.
            destination = archive.GetUploadDestination(storage_path,
                                                       LIBRARY_NAME + '.zip',
                                                       is_master)
            Upload(options, maven_zip, storage_path, destination, is_master)
            # Upload the jar file for accessing GCS as a maven repro.
            maven_destination = archive.GetUploadDestination(
                utils.get_maven_path('desugar_jdk_libs', version),
                'desugar_jdk_libs-%s.jar' % version, is_master)
            if options.dry_run:
                print('Dry run, not actually creating maven repo')
            else:
                utils.upload_file_to_cloud_storage(library_jar, maven_destination)
                print('Maven repo root available at: %s' %
                      archive.GetMavenUrl(is_master))
def test_is_not_bot():
    """An ordinary human display name must not be classified as a bot."""
    human = Mock(first_name='Paul Smith')
    assert utils.is_bot(human) is False or not utils.is_bot(human)
def Main():
    """Assemble Gradle arguments from the parsed options and run the R8
    test suite on the selected ART VM(s), archiving results on failure.

    NOTE: this is Python 2 code (bare ``print`` statements below).
    Returns a process exit code (0 on success).
    """
    (options, args) = ParseOptions()
    if utils.is_bot():
        gradle.RunGradle(['--no-daemon', 'clean'])
    gradle_args = ['--stacktrace']
    if utils.is_bot():
        # Bots don't like dangling processes
        gradle_args.append('--no-daemon')
    # Set all necessary Gradle properties and options first.
    if options.shard_count:
        assert options.shard_number
        gradle_args.append('-Pshard_count=%s' % options.shard_count)
        gradle_args.append('-Pshard_number=%s' % options.shard_number)
    if options.verbose:
        gradle_args.append('-Pprint_test_stdout')
    if options.no_internal:
        gradle_args.append('-Pno_internal')
    if options.only_internal:
        gradle_args.append('-Ponly_internal')
    if options.all_tests:
        gradle_args.append('-Pall_tests')
    if options.tool:
        gradle_args.append('-Ptool=%s' % options.tool)
    if options.one_line_per_test:
        gradle_args.append('-Pone_line_per_test')
    if options.jctf:
        gradle_args.append('-Pjctf')
    if options.only_jctf:
        gradle_args.append('-Ponly_jctf')
    if options.jctf_compile_only:
        gradle_args.append('-Pjctf_compile_only')
    if options.disable_assertions:
        gradle_args.append('-Pdisable_assertions')
    if options.with_code_coverage:
        gradle_args.append('-Pwith_code_coverage')
    if os.name == 'nt':
        # temporary hack
        gradle_args.append('-Pno_internal')
        gradle_args.append('-x')
        gradle_args.append('createJctfTests')
        gradle_args.append('-x')
        gradle_args.append('jctfCommonJar')
        gradle_args.append('-x')
        gradle_args.append('jctfTestsClasses')
    if options.test_dir:
        gradle_args.append('-Ptest_dir=' + options.test_dir)
        if not os.path.exists(options.test_dir):
            os.makedirs(options.test_dir)
    if options.java_home:
        gradle_args.append('-Dorg.gradle.java.home=' + options.java_home)
    if options.java_max_memory_size:
        gradle_args.append('-Dorg.gradle.jvmargs=-Xmx' +
                           options.java_max_memory_size)
    if options.generate_golden_files_to:
        gradle_args.append('-Pgenerate_golden_files_to=' +
                           options.generate_golden_files_to)
        if not os.path.exists(options.generate_golden_files_to):
            os.makedirs(options.generate_golden_files_to)
        gradle_args.append('-PHEAD_sha1=' + utils.get_HEAD_sha1())
    if options.use_golden_files_in:
        gradle_args.append('-Puse_golden_files_in=' +
                           options.use_golden_files_in)
        if not os.path.exists(options.use_golden_files_in):
            os.makedirs(options.use_golden_files_in)
        gradle_args.append('-PHEAD_sha1=' + utils.get_HEAD_sha1())
    if options.alternative_frontend:
        gradle_args.append('-Palternative_frontend')
    if (not options.no_r8lib) and options.r8lib_no_deps:
        print(
            'Cannot run tests on r8lib with and without deps. R8lib is now default target.'
        )
        exit(1)
    if not options.no_r8lib:
        gradle_args.append('-Pr8lib')
        # Force gradle to build a version of r8lib without dependencies for
        # BootstrapCurrentEqualityTest.
        gradle_args.append('R8LibNoDeps')
    if options.r8lib_no_deps:
        gradle_args.append('-Pr8lib_no_deps')
    # Build an R8 with dependencies for bootstrapping tests before adding test sources.
    gradle_args.append('r8WithRelocatedDeps')
    gradle_args.append('r8WithRelocatedDeps11')
    # Add Gradle tasks
    gradle_args.append('cleanTest')
    gradle_args.append('test')
    if options.fail_fast:
        gradle_args.append('--fail-fast')
    if options.failed:
        # Re-run only the tests that failed last time.
        args = compute_failed_tests(args)
        if args is None:
            return 1
        if len(args) == 0:
            print "No failing tests"
            return 0
    # Test filtering. Must always follow the 'test' task.
    for testFilter in args:
        gradle_args.append('--tests')
        gradle_args.append(testFilter)
    if options.with_code_coverage:
        # Create Jacoco report after tests.
        gradle_args.append('jacocoTestReport')
    if options.use_golden_files_in:
        # Fetch the golden files matching the current HEAD.
        sha1 = '%s' % utils.get_HEAD_sha1()
        with utils.ChangedWorkingDirectory(options.use_golden_files_in):
            utils.download_file_from_cloud_storage(
                'gs://r8-test-results/golden-files/%s.tar.gz' % sha1,
                '%s.tar.gz' % sha1)
            utils.unpack_archive('%s.tar.gz' % sha1)
    if utils.is_bot() and not utils.IsWindows():
        # Hang detection on bots: dump stacks when tests stop progressing.
        timestamp_file = os.path.join(utils.BUILD, 'last_test_time')
        if os.path.exists(timestamp_file):
            os.remove(timestamp_file)
        gradle_args.append('-Pupdate_test_timestamp=' + timestamp_file)
        thread.start_new_thread(timeout_handler, (timestamp_file, ))
    rotate_test_reports()
    if options.only_jctf:
        # Note: not setting -Pruntimes will run with all available runtimes.
        return_code = gradle.RunGradle(gradle_args, throw_on_failure=False)
        return archive_and_return(return_code, options)
    # Now run tests on selected runtime(s).
    vms_to_test = [options.dex_vm] if options.dex_vm != "all" else ALL_ART_VMS
    for art_vm in vms_to_test:
        vm_suffix = "_" + options.dex_vm_kind if art_vm != "default" else ""
        runtimes = ['dex-' + art_vm]
        # Only append the "none" runtime and JVMs if running on the "default" DEX VM.
        if art_vm == "default":
            runtimes.extend(['jdk8', 'jdk9', 'jdk11', 'none'])
        return_code = gradle.RunGradle(gradle_args + [
            '-Pdex_vm=%s' % art_vm + vm_suffix,
            '-Pruntimes=%s' % ':'.join(runtimes),
        ], throw_on_failure=False)
        if options.generate_golden_files_to:
            # Publish the freshly generated golden files for this HEAD.
            sha1 = '%s' % utils.get_HEAD_sha1()
            with utils.ChangedWorkingDirectory(
                    options.generate_golden_files_to):
                archive = utils.create_archive(sha1)
                utils.upload_file_to_cloud_storage(
                    archive, 'gs://r8-test-results/golden-files/' + archive)
        if return_code != 0:
            return archive_and_return(return_code, options)
    return 0
def Main():
    """Build all R8 release artifacts and upload them to cloud storage.

    Refuses to run on a non-bot machine unless --dry_run is given. Builds the
    maven releases, the no-deps jars and all lib targets, stamps each archived
    jar with an embedded r8-version.properties file, then uploads every
    artifact (and a maven-compatible copy of r8.jar) to the gs:// destination
    derived from the current version or git hash.

    Raises:
        Exception: when run by a non-bot without --dry_run, or when the
            target archive directory already exists.
    """
    (options, args) = ParseOptions()
    if not utils.is_bot() and not options.dry_run:
        raise Exception('You are not a bot, don\'t archive builds')
    if utils.is_bot():
        SetRLimitToMax()
        PrintResourceInfo()

    # Create maven release which uses a build that exclude dependencies.
    create_maven_release.run(utils.MAVEN_ZIP)
    create_maven_release.run(utils.MAVEN_ZIP_LIB, is_r8lib=True)

    # Generate and copy a full build without dependencies.
    gradle.RunGradleExcludeDeps([utils.R8, utils.R8_SRC])
    shutil.copyfile(utils.R8_JAR, utils.R8_FULL_EXCLUDE_DEPS_JAR)

    # Ensure all archived artifacts has been built before archiving.
    # The target tasks postfixed by 'lib' depend on the actual target task so
    # building it invokes the original task first.
    # The '-Pno_internal' flag is important because we generate the lib based
    # on uses in tests.
    gradle.RunGradle([
        utils.R8, utils.D8, utils.COMPATDX, utils.COMPATPROGUARD, utils.R8LIB,
        utils.R8LIB_NO_DEPS, utils.COMPATDXLIB, utils.COMPATPROGUARDLIB,
        '-Pno_internal'
    ])

    version = GetVersion()
    is_master = IsMaster(version)
    if is_master:
        # On master we use the git hash to archive with.
        # (Was a Python-2 `print` statement; use the function form consistent
        # with the rest of this function.)
        print('On master, using git hash for archiving')
        version = GetGitHash()

    destination = GetVersionDestination('gs://', version, is_master)
    if utils.cloud_storage_exists(destination) and not options.dry_run:
        raise Exception('Target archive directory %s already exists' %
                        destination)

    with utils.TempDir() as temp:
        # Write a properties file that is embedded into every archived jar so
        # a binary can always be traced back to its source revision.
        version_file = os.path.join(temp, 'r8-version.properties')
        with open(version_file, 'w') as version_writer:
            version_writer.write('version.sha=' + GetGitHash() + '\n')
            # NOTE(review): SWARMING_BOT_ID may be unset (e.g. local
            # --dry_run); default to 'unknown' instead of crashing on
            # None-concatenation — confirm this is acceptable content.
            version_writer.write('releaser=go/r8bot (' +
                                 os.environ.get('SWARMING_BOT_ID', 'unknown') +
                                 ')\n')
            version_writer.write('version-file.version.code=1\n')

        # `artifact` rather than `file`/`zip` below: both shadow builtins.
        for artifact in [
                utils.D8_JAR,
                utils.R8_JAR,
                utils.R8LIB_JAR,
                utils.R8LIB_JAR + '.map',
                utils.R8_SRC_JAR,
                utils.R8_FULL_EXCLUDE_DEPS_JAR,
                utils.R8LIB_EXCLUDE_DEPS_JAR,
                utils.R8LIB_EXCLUDE_DEPS_JAR + '.map',
                utils.COMPATDX_JAR,
                utils.COMPATDXLIB_JAR,
                utils.COMPATDXLIB_JAR + '.map',
                utils.COMPATPROGUARD_JAR,
                utils.COMPATPROGUARDLIB_JAR,
                utils.COMPATPROGUARDLIB_JAR + '.map',
                utils.MAVEN_ZIP,
                utils.MAVEN_ZIP_LIB,
                utils.GENERATED_LICENSE,
        ]:
            file_name = os.path.basename(artifact)
            tagged_jar = os.path.join(temp, file_name)
            shutil.copyfile(artifact, tagged_jar)
            # Stamp the version file into every jar except source jars.
            if file_name.endswith(
                    '.jar') and not file_name.endswith('-src.jar'):
                with zipfile.ZipFile(tagged_jar, 'a') as zip_file:
                    zip_file.write(version_file,
                                   os.path.basename(version_file))
            destination = GetUploadDestination(version, file_name, is_master)
            print('Uploading %s to %s' % (tagged_jar, destination))
            if options.dry_run:
                print('Dry run, not actually uploading')
            else:
                utils.upload_file_to_cloud_storage(tagged_jar, destination)
                print('File available at: %s' %
                      GetUrl(version, file_name, is_master))
            if artifact == utils.R8_JAR:
                # Upload R8 to a maven compatible location.
                maven_dst = GetUploadDestination(utils.get_maven_path(version),
                                                 'r8-%s.jar' % version,
                                                 is_master)
                if options.dry_run:
                    print('Dry run, not actually creating maven repo')
                else:
                    utils.upload_file_to_cloud_storage(tagged_jar, maven_dst)
                    print('Maven repo root available at: %s' %
                          GetMavenUrl(is_master))
def Main():
    """Run the R8 test suites via Gradle on the selected runtime(s).

    Translates parsed command-line options into Gradle project properties,
    runs the 'test' task once per selected ART VM, optionally uploads golden
    files, and archives failures when requested.

    Returns:
        int: 0 on success, otherwise the first non-zero Gradle return code.
    """
    (options, args) = ParseOptions()
    if utils.is_bot():
        # Log the JDK in use on bots for debugging.
        # (Were Python-2 `print` statements; single-argument print() calls
        # are valid in both Python 2 and 3 and match the rest of this file.)
        print("Result of 'java -version':")
        print(subprocess.check_output(['java', '-version']))

    gradle.RunGradle(['clean'])
    gradle_args = ['--stacktrace']

    # Set all necessary Gradle properties and options first.
    if options.verbose:
        gradle_args.append('-Pprint_test_stdout')
    if options.no_internal:
        gradle_args.append('-Pno_internal')
    if options.only_internal:
        gradle_args.append('-Ponly_internal')
    if options.all_tests:
        gradle_args.append('-Pall_tests')
    if options.tool:
        gradle_args.append('-Ptool=%s' % options.tool)
    if options.one_line_per_test:
        gradle_args.append('-Pone_line_per_test')
    if options.jctf:
        gradle_args.append('-Pjctf')
    if options.only_jctf:
        gradle_args.append('-Ponly_jctf')
    if options.jctf_compile_only:
        gradle_args.append('-Pjctf_compile_only')
    if options.aosp_jar:
        gradle_args.append('-Paosp_jar')
    if options.disable_assertions:
        gradle_args.append('-Pdisable_assertions')
    if options.with_code_coverage:
        gradle_args.append('-Pwith_code_coverage')
    if os.name == 'nt':
        # Temporary hack: JCTF targets do not build on Windows, so skip them.
        gradle_args.append('-Pno_internal')
        gradle_args.append('-x')
        gradle_args.append('createJctfTests')
        gradle_args.append('-x')
        gradle_args.append('jctfCommonJar')
        gradle_args.append('-x')
        gradle_args.append('jctfTestsClasses')
    if options.test_dir:
        gradle_args.append('-Ptest_dir=' + options.test_dir)
        if not os.path.exists(options.test_dir):
            os.makedirs(options.test_dir)
    if options.java_home:
        gradle_args.append('-Dorg.gradle.java.home=' + options.java_home)
    if options.generate_golden_files_to:
        gradle_args.append('-Pgenerate_golden_files_to=' +
                           options.generate_golden_files_to)
        if not os.path.exists(options.generate_golden_files_to):
            os.makedirs(options.generate_golden_files_to)
        gradle_args.append('-PHEAD_sha1=' + utils.get_HEAD_sha1())
    if options.use_golden_files_in:
        gradle_args.append('-Puse_golden_files_in=' +
                           options.use_golden_files_in)
        if not os.path.exists(options.use_golden_files_in):
            os.makedirs(options.use_golden_files_in)
        gradle_args.append('-PHEAD_sha1=' + utils.get_HEAD_sha1())
    if (not options.no_r8lib) and options.r8lib_no_deps:
        print(
            'Cannot run tests on r8lib with and without deps. R8lib is now default target.'
        )
        exit(1)
    if not options.no_r8lib:
        gradle_args.append('-Pr8lib')
        # Force gradle to build a version of r8lib without dependencies for
        # BootstrapCurrentEqualityTest.
        gradle_args.append('R8LibNoDeps')
    if options.r8lib_no_deps:
        gradle_args.append('-Pr8lib_no_deps')

    # Build an R8 with dependencies for bootstrapping tests before adding
    # test sources.
    gradle_args.append('r8WithRelocatedDeps')

    # Add Gradle tasks
    gradle_args.append('cleanTest')
    gradle_args.append('test')
    # Test filtering. Must always follow the 'test' task.
    for testFilter in args:
        gradle_args.append('--tests')
        gradle_args.append(testFilter)
    if options.with_code_coverage:
        # Create Jacoco report after tests.
        gradle_args.append('jacocoTestReport')

    if options.use_golden_files_in:
        # Fetch the golden files recorded for the current revision.
        sha1 = '%s' % utils.get_HEAD_sha1()
        with utils.ChangedWorkingDirectory(options.use_golden_files_in):
            utils.download_file_from_cloud_storage(
                'gs://r8-test-results/golden-files/%s.tar.gz' % sha1,
                '%s.tar.gz' % sha1)
            utils.unpack_archive('%s.tar.gz' % sha1)

    if utils.is_bot() and not utils.IsWindows():
        # Track test progress via a timestamp file so a watchdog thread can
        # kill stuck runs.
        timestamp_file = os.path.join(utils.BUILD, 'last_test_time')
        if os.path.exists(timestamp_file):
            os.remove(timestamp_file)
        gradle_args.append('-Pupdate_test_timestamp=' + timestamp_file)
        # NOTE(review): `thread.start_new_thread` is a Python-2-only API;
        # under Python 3 this would need `_thread` or `threading` — confirm
        # the intended interpreter before porting.
        thread.start_new_thread(timeout_handler, (timestamp_file, ))

    # Now run tests on selected runtime(s).
    vms_to_test = [options.dex_vm] if options.dex_vm != "all" else ALL_ART_VMS
    for art_vm in vms_to_test:
        vm_kind_to_test = ("_" + options.dex_vm_kind
                           if art_vm != "default" else "")
        return_code = gradle.RunGradle(
            gradle_args + ['-Pdex_vm=%s' % (art_vm + vm_kind_to_test)],
            throw_on_failure=False)
        if options.generate_golden_files_to:
            # Upload freshly generated golden files for this revision.
            sha1 = '%s' % utils.get_HEAD_sha1()
            with utils.ChangedWorkingDirectory(
                    options.generate_golden_files_to):
                archive = utils.create_archive(sha1)
                utils.upload_file_to_cloud_storage(
                    archive, 'gs://r8-test-results/golden-files/' + archive)
        if return_code != 0:
            if options.archive_failures and os.name != 'nt':
                archive_failures()
            return return_code

    return 0