def get(self, id):
    """Delete the user_info row identified by *id*, then redirect to the
    user list for the current menu.

    Arguments:
        id: primary key of the user_info row to delete (taken from the URL).
    """
    menu_code = self.get_argument("m")
    # Parameterized query: the original interpolated `id` straight into the
    # SQL string ("... where id = %s" % id), which is an SQL injection hole.
    strSql = "delete from user_info where id = %s"
    LOGGER.debug(strSql)
    last_rowid = self.db.execute_lastrowid(strSql, id)
    self.redirect("/user/list/?m=" + menu_code)
def _save_versions_catalog(self):
    """Fetch the ATK versions catalog (version.json) and cache the parsed
    result on ``self._versions_catalog``.

    Raises:
        requests.exceptions.RequestException: re-raised after logging when
            the catalog cannot be downloaded.
    """
    catalog_url = os.path.join(ATK_REPOS_URL, 'version.json')
    try:
        catalog_response = requests.get(catalog_url)
    except requests.exceptions.RequestException as e:
        LOGGER.error('Cannot building {} project. Cannot get versions catalog.'.format(self.name))
        raise e
    self._versions_catalog = json.loads(catalog_response.text)
def _get_catalog_name_by_release_number(self, release_number):
    """Translate a numeric ATK release number into its catalog key.

    Arguments:
        release_number: release number (string or int) to look up.

    Returns:
        The matching key from ``self._versions_catalog``, or
        ``LATEST_ATK_VERSION`` when the number is unknown.
    """
    for key, value in self._versions_catalog.iteritems():
        if value['release'] == int(release_number):
            return key
    # No catalog entry matched: fall back to the latest known version.
    LOGGER.warning(
        'Unknown release number for {} project. Latest version will be used.'
        .format(self.name))
    return LATEST_ATK_VERSION
def get(self):
    """Write (as JSON) the current user's unread messages from the last
    30 days; writes nothing when there are none."""
    user_info = self.get_current_user()
    id = str(user_info['id'])
    # Parameterized query: the original concatenated `id` into the SQL
    # string, an SQL injection vector. `%%` stays doubled because the
    # driver %-formats the query when parameters are supplied.
    strSql = ("select id, send_id, rcv_id, message_title, message_txt, type, "
              "date_format(post_time, '%%Y %%m %%d %%H:%%i:%%s') post_time, status "
              "from message_info where rcv_id = %s "
              "and TO_DAYS(now()) - TO_DAYS(post_time) <= 30 and status = 0")
    message_info = self.db.query(strSql, id)
    LOGGER.debug(message_info)
    if message_info:
        self.write(tornado.escape.json_encode(message_info))
    return None
def extract_tar_file(self, dest_path, source_path=None):
    """Extract a tar archive into *dest_path* and remember that location.

    Arguments:
        dest_path: directory the archive is unpacked into; stored on
            ``self._local_sources_path`` for later build steps.
        source_path: optional archive path; defaults to the previously
            downloaded ``self._local_tar_path``.

    Raises:
        Exception: re-raised after logging if opening or extracting fails.
    """
    tar_path = source_path if source_path else self._local_tar_path
    self._local_sources_path = dest_path
    try:
        # Context manager guarantees the archive handle is closed even when
        # extractall() raises (the original leaked it on failure).
        with tarfile.open(tar_path) as tar:
            tar.extractall(self._local_sources_path)
    except Exception as e:
        LOGGER.error('Cannot extract tar file for {} project'.format(self.name))
        raise e
def _save_versions_catalog(self):
    """Download the ATK versions catalog and cache it on the instance.

    Fetches ``version.json`` from ``ATK_REPOS_URL`` and stores the parsed
    dict in ``self._versions_catalog``.

    Raises:
        requests.exceptions.RequestException: re-raised after logging when
            the catalog cannot be fetched.
    """
    versions_url = os.path.join(ATK_REPOS_URL, 'version.json')
    try:
        response = requests.get(versions_url)
    except requests.exceptions.RequestException as e:
        LOGGER.error(
            'Cannot building {} project. Cannot get versions catalog.'.
            format(self.name))
        raise e
    self._versions_catalog = json.loads(response.text)
def get(self):
    """Render the staff-add page with users (type=1) that have no staff
    record yet; 404 when no such users exist."""
    menu_code = self.get_argument("m")
    strSql = "select a.id,a.user_code from user_info a left join staff_info b on a.id=b.user_id where b.user_id is null and a.type=1 order by a.id desc"
    user_list = self.db.query(strSql)
    LOGGER.debug(user_list)
    self.db.close()
    if not user_list:
        # raise exits the handler; the original had a dead `return None`
        # after this statement, which has been removed.
        raise tornado.web.HTTPError(404)
    self.render("staff/staffadd.html", user_list=user_list, menu_code=menu_code)
def extract_tar_file(self, dest_path, source_path=None):
    """Extract a tar archive into *dest_path* and remember that location.

    Arguments:
        dest_path: directory the archive is unpacked into; stored on
            ``self._local_sources_path`` for later build steps.
        source_path: optional archive path; defaults to the previously
            downloaded ``self._local_tar_path``.

    Raises:
        Exception: re-raised after logging if opening or extracting fails.
    """
    tar_path = source_path if source_path else self._local_tar_path
    self._local_sources_path = dest_path
    try:
        tar = tarfile.open(tar_path)
        tar.extractall(self._local_sources_path)
        # NOTE(review): if extractall() raises, this close() is skipped and
        # the archive handle leaks — a `with tarfile.open(...)` would be safer.
        tar.close()
    except Exception as e:
        LOGGER.error('Cannot extract tar file for {} project'.format(
            self.name))
        raise e
def check_menu(self, m):
    """Check whether the current user's group may access menu *m*.

    Arguments:
        m: menu code requested by the client.

    Returns:
        "OK" when access is allowed; otherwise renders the login page with
        a no-permission message and returns None.
    """
    # Parameterized query: the original concatenated the client-supplied
    # menu code straight into the SQL string (SQL injection).
    strSql = ("select * from group_menu a left join user_info b "
              "on a.group_code = b.type "
              "where b.user_code='admin' and menu_code=%s")
    LOGGER.info("----------check_menu----------" + strSql)
    menus = self.db.get(strSql, m)
    self.db.close()
    if not menus:
        self.render("login.html", message="无权限访问")
        return None
    return "OK"
def create_deployable_zip(self, path_for_zip, sources_path=None, extra_files_paths=None):
    """Package the project sources (plus optional extra files) into a zip.

    Arguments:
        path_for_zip: directory where the zip is created (made if missing).
        sources_path: overrides ``self._local_sources_path`` as file root.
        extra_files_paths: optional iterable of file paths copied into the
            sources first; a copied manifest.yml gets its VERSION env entry
            rewritten to ``self._version_in_manifest``.

    Raises:
        Exception: re-raised after logging on copy/manifest or zip failure.
    """
    LOGGER.info('Creating zip package for {} project'.format(self.name))
    if not os.path.exists(path_for_zip):
        os.makedirs(path_for_zip)
    project_files_path = sources_path if sources_path else self._local_sources_path
    try:
        # Guard: extra_files_paths defaults to None; the original iterated
        # it unconditionally and raised TypeError when the arg was omitted.
        for extra_file_path in (extra_files_paths or []):
            shutil.copyfile(extra_file_path, os.path.join(project_files_path, ntpath.basename(extra_file_path)))
            if ntpath.basename(extra_file_path) == 'manifest.yml':
                app_manifest_path = os.path.join(project_files_path, ntpath.basename(extra_file_path))
                with open(app_manifest_path, 'r') as f_stream:
                    # SECURITY: yaml.load can construct arbitrary objects;
                    # the manifest comes from this repo, but yaml.safe_load
                    # would be the safer choice if inputs ever vary.
                    manifest_yml = yaml.load(f_stream)
                    manifest_yml['applications'][0]['env']['VERSION'] = self._version_in_manifest
                with open(app_manifest_path, 'w') as f_stream:
                    f_stream.write(yaml.safe_dump(manifest_yml))
    except Exception as e:
        LOGGER.error('Cannot add extra files to {} project zip package'.format(self.name))
        raise e
    # Zip name falls back to the project name when no explicit zip_name.
    zip_base = self.zip_name if self.zip_name else self.name
    path_for_zip = os.path.join(path_for_zip, zip_base + '.zip')
    try:
        deployable_zip = zipfile.ZipFile(path_for_zip, 'w')
        for root, dirs, files in os.walk(project_files_path):
            for file in files:
                # Archive paths are made relative to the project directory.
                deployable_zip.write(
                    os.path.join(os.path.relpath(root, PLATFORM_PARENT_PATH), file),
                    os.path.join(os.path.relpath(root, os.path.join(PLATFORM_PARENT_PATH, self.name)), file))
        deployable_zip.close()
    except Exception as e:
        LOGGER.error('Cannot create zip package for {}'.format(self.name))
        raise e
    LOGGER.info("Package for {} has been created".format(self.name))
def _download_tar_file(self, tar_name, version, dest_tar_path):
    """Download the project tar archive for a given ATK version.

    Arguments:
        tar_name: archive file name fetched from the 'binaries' folder.
        version: release number, or the "latest" marker.
        dest_tar_path: local path the archive is written to (also cached
            on ``self._local_tar_path``).

    Raises:
        requests.exceptions.RequestException: download failed.
        IOError: archive could not be written to disk.
    """
    LOGGER.info('Downloading {} in version {} for {} project'.format(
        tar_name, version, self.name))
    self._local_tar_path = dest_tar_path
    if version.lower() == LATEST_ATK_VERSION:
        # "latest" resolves its concrete release number from the catalog.
        self._version_in_manifest = self._versions_catalog[
            LATEST_ATK_VERSION]['release']
        catalog_name_in_path = LATEST_ATK_VERSION
    else:
        self._version_in_manifest = version
        catalog_name_in_path = self._get_catalog_name_by_release_number(
            version)
    download_url = os.path.join(self.url, catalog_name_in_path, 'binaries',
                                tar_name)
    try:
        # NOTE(review): the HTTP status is never checked, so a 404 error
        # page would be written into the tar file — confirm the server
        # always returns 200 for valid catalog entries.
        response = requests.get(download_url)
        with open(dest_tar_path, 'wb') as tar:
            tar.write(response.content)
    except requests.exceptions.RequestException as e:
        LOGGER.error(
            'Cannot download {} tar archive for {} project.'.format(
                tar_name, self.name))
        raise e
    except IOError as e:
        LOGGER.error(
            'Cannot save {} tar archive on your hard disk.'.format(
                tar_name))
        raise e
    LOGGER.info(
        'Tar archive for {} app from {} in version {} has been downloaded'.
        format(self.name, download_url, version))
def safe_log(message: str):
    """Log an INFO-level message without ever raising.

    Uses the configured LOGGER when available, falls back to a timestamped
    ``print`` otherwise, and degrades to a bare ``print`` if the logging
    machinery itself fails.

    Arguments:
        message: the message to log.
    """
    # Pre-bind so the fallback print cannot hit a NameError: in the
    # original, a failure inside __get_indents() left `indents` unbound
    # and the except handler itself crashed.
    indents = ''
    try:
        indents = __get_indents(__get_callstack_indent_count())
        if LOGGER is not None:
            LOGGER.log_info_message(indents + message)
        else:
            print('{} INFO: {}{}'.format(datetime.now(), indents, message))
    except Exception:
        print('{}{}'.format(indents, message))
def build_sources():
    """Thread worker: build every project queued on ``apps_queue``.

    Picks the right builder class per app, downloads sources, builds, and
    packages the result; failed app names are appended to the shared
    ``fails`` list so the main thread can report them afterwards.
    """
    builders = {
        'source_downloader': Builder,
        'go': GoBuilder,
        'tool': ToolBuilder,
        'universal': UniversalBuilder,
        'atk': AtkBuilder,
        'release_downloader': ReleaseDownloader
    }
    # NOTE(review): empty()/get() is not atomic across threads; this is
    # safe only because the queue is fully populated before workers start.
    while apps_queue.empty() is not True:
        app = apps_queue.get()
        builder = builders[app['builder']](app)
        try:
            if app['builder'] == 'release_downloader':
                # Pre-built releases are just downloaded, never compiled.
                builder.download_release_zip(apps_output_path)
            elif app['builder'] != 'atk':
                builder.download_project_sources(snapshot=release_tag,
                                                 url=os.path.join(
                                                     constants.TAP_REPOS_URL,
                                                     app['name']))
                builder.build()
                destination_zip_path = tools_output_path if app[
                    'builder'] == 'tool' else apps_output_path
                if app['builder'] == 'universal':
                    # Universal builders produce their own zip; copy it over.
                    zip_path = glob.glob('{0}/{0}*.zip'.format(app['name']))[0]
                    shutil.copy(zip_path, destination_zip_path)
                else:
                    builder.create_zip_package(destination_zip_path)
                # Record the built git ref under the shared lock.
                threads_lock.acquire()
                refs_summary[builder.name] = builder.ref
                threads_lock.release()
            else:
                # ATK projects come from their own repo and get a manifest.
                builder.download_project_sources(snapshot=atk_version,
                                                 url=constants.ATK_REPOS_URL)
                builder.build()
                builder.create_deployable_zip(
                    apps_output_path,
                    extra_files_paths=[
                        os.path.join(constants.PLATFORM_PARENT_PATH, 'utils',
                                     app['name'], 'manifest.yml')
                    ])
        except Exception as e:
            # A single failure must not kill the worker thread.
            LOGGER.error('Cannot build %s due to %s', app['name'], e)
            fails_lock.acquire()
            fails.append(app['name'])
            fails_lock.release()
def download_release_zip(self, dest_path):
    """Download the project's pre-built release zip via wget.

    Arguments:
        dest_path: directory the '<name>.zip' file is written into.

    Raises:
        ValueError: when no release url is configured for the project.
        Exception: re-raised after logging when the download fails.
    """
    if not self.url:
        LOGGER.error('Not specified release url for %s', self.name)
        # The original raised a plain string, which is itself a TypeError
        # in modern Python — raise a real exception with the message.
        raise ValueError('Not specified release url for {}'.format(self.name))
    LOGGER.info('Downloading release package for %s from %s', self.name, self.url)
    with open(self.build_log_path, 'a') as build_log, \
            open(self.err_log_path, 'a') as err_log:
        try:
            subprocess.check_call(['wget', '-O', os.path.join(dest_path, '{}.zip'.format(self.name)), self.url],
                                  stdout=build_log, stderr=err_log)
        except Exception as e:
            LOGGER.error('Cannot download release package for %s project', self.name)
            raise e
    LOGGER.info('Release package has been downloaded for %s project', self.name)
def create_deployable_zip(self, path_for_zip, sources_path=None, extra_files_paths=None):
    """Package the project sources (plus optional extra files) into a zip.

    Arguments:
        path_for_zip: directory where the zip is created (made if missing).
        sources_path: overrides ``self._local_sources_path`` as file root.
        extra_files_paths: iterable of file paths copied into the sources
            first; a copied manifest.yml gets its VERSION env entry
            rewritten to ``self._version_in_manifest``.
            NOTE(review): defaults to None but is iterated unconditionally
            below — calling without it raises TypeError; verify callers.

    Raises:
        Exception: re-raised after logging on copy/manifest or zip failure.
    """
    LOGGER.info('Creating zip package for {} project'.format(self.name))
    if not os.path.exists(path_for_zip):
        os.makedirs(path_for_zip)
    project_files_path = sources_path if sources_path else self._local_sources_path
    try:
        for extra_file_path in extra_files_paths:
            shutil.copyfile(
                extra_file_path,
                os.path.join(project_files_path,
                             ntpath.basename(extra_file_path)))
            if ntpath.basename(extra_file_path) == 'manifest.yml':
                app_manifest_path = os.path.join(
                    project_files_path, ntpath.basename(extra_file_path))
                with open(app_manifest_path, 'r') as f_stream:
                    # NOTE(review): yaml.load without a safe loader can
                    # construct arbitrary objects; fine for repo-owned
                    # manifests, but yaml.safe_load would be safer.
                    manifest_yml = yaml.load(f_stream)
                    manifest_yml['applications'][0]['env'][
                        'VERSION'] = self._version_in_manifest
                with open(app_manifest_path, 'w') as f_stream:
                    f_stream.write(yaml.safe_dump(manifest_yml))
    except Exception as e:
        LOGGER.error(
            'Cannot add extra files to {} project zip package'.format(
                self.name))
        raise e
    # Zip name falls back to the project name when zip_name is unset.
    path_for_zip = os.path.join(
        path_for_zip, self.zip_name +
        '.zip') if self.zip_name else os.path.join(path_for_zip,
                                                   self.name + '.zip')
    try:
        deployable_zip = zipfile.ZipFile(path_for_zip, 'w')
        for root, dirs, files in os.walk(project_files_path):
            for file in files:
                # Archive paths are made relative to the project directory.
                deployable_zip.write(
                    os.path.join(
                        os.path.relpath(root, PLATFORM_PARENT_PATH), file),
                    os.path.join(
                        os.path.relpath(
                            root,
                            os.path.join(PLATFORM_PARENT_PATH, self.name)),
                        file))
        deployable_zip.close()
    except Exception as e:
        LOGGER.error('Cannot create zip package for {}'.format(self.name))
        raise e
    LOGGER.info("Package for {} has been created".format(self.name))
def post(self):
    """Create a staff_info record from the submitted form fields, then
    redirect to the staff list for the current menu."""
    menu_code = self.get_argument("m")
    user_code = self.get_argument("user_code")
    user_name = self.get_argument("user_name")
    age = self.get_argument("age")
    marriage = self.get_argument("marriage")
    edu = self.get_argument("edu")
    join_date = self.get_argument("join_date")
    position = self.get_argument("position")
    dept = self.get_argument("dept")
    begin_year = self.get_argument("begin_year")
    loc_address = self.get_argument("loc_address")
    home_town = self.get_argument("home_town")
    home_address = self.get_argument("home_address")
    status = self.get_argument("status")
    emc_person = self.get_argument("emc_person")
    emc_phone = self.get_argument("emc_phone")
    # Parameterized insert: the original concatenated all 15 raw form
    # fields into the SQL string — a textbook SQL injection hole.
    strSql = ("insert into staff_info(user_id, name, age, marriage, educational, "
              "join_date, position, department, begin_work_year, local_address, "
              "home_town, home_address, status, emergency_person, emergency_phone) "
              "values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
    LOGGER.debug(strSql)
    last_rowid = self.db.execute_lastrowid(
        strSql, user_code, user_name, age, marriage, edu, join_date,
        position, dept, begin_year, loc_address, home_town, home_address,
        status, emc_person, emc_phone)
    LOGGER.debug(last_rowid)
    self.redirect("/staff/list/?m=" + menu_code)
def build_sources():
    """Thread worker: build every project queued on ``apps_queue``.

    Picks the right builder class per app, downloads sources, builds, and
    packages the result; failed app names are appended to the shared
    ``fails`` list so the main thread can report them afterwards.
    """
    builders = {
        'source_downloader': Builder,
        'go': GoBuilder,
        'tool': ToolBuilder,
        'universal': UniversalBuilder,
        'atk': AtkBuilder,
        'release_downloader': ReleaseDownloader
    }
    # NOTE: empty()/get() is not atomic, but the queue is fully populated
    # before any worker starts, so no worker can block on get().
    while not apps_queue.empty():
        app = apps_queue.get()
        builder = builders[app['builder']](app)
        try:
            if app['builder'] == 'release_downloader':
                # Pre-built releases are just downloaded, never compiled.
                builder.download_release_zip(apps_output_path)
            elif app['builder'] != 'atk':
                builder.download_project_sources(snapshot=release_tag,
                                                 url=os.path.join(constants.TAP_REPOS_URL, app['name']))
                builder.build()
                destination_zip_path = tools_output_path if app['builder'] == 'tool' else apps_output_path
                if app['builder'] == 'universal':
                    # Universal builders produce their own zip; copy it over.
                    zip_path = glob.glob('{0}/{0}*.zip'.format(app['name']))[0]
                    shutil.copy(zip_path, destination_zip_path)
                else:
                    builder.create_zip_package(destination_zip_path)
                # `with` guarantees the lock is released even if the dict
                # update raises (bare acquire/release could leave the lock
                # held forever and deadlock the other workers).
                with threads_lock:
                    refs_summary[builder.name] = builder.ref
            else:
                # ATK projects come from their own repo and get a manifest.
                builder.download_project_sources(snapshot=atk_version,
                                                 url=constants.ATK_REPOS_URL)
                builder.build()
                builder.create_deployable_zip(
                    apps_output_path,
                    extra_files_paths=[os.path.join(constants.PLATFORM_PARENT_PATH, 'utils', app['name'], 'manifest.yml')])
        except Exception as e:
            # A single failure must not kill the worker thread.
            LOGGER.error('Cannot build %s due to %s', app['name'], e)
            with fails_lock:
                fails.append(app['name'])
def build(self):
    """Run `godep go build ./...` inside the project sources directory,
    appending stdout/stderr to the project's build and error logs.

    Raises:
        Exception: re-raised after logging when the build command fails.
    """
    LOGGER.info("Building {} project using godep".format(self.name))
    godep_cmd = ["godep", "go", "build", "./..."]
    with open(self.build_log_path, "a") as out_stream, open(self.err_log_path, "a") as err_stream:
        try:
            subprocess.check_call(godep_cmd,
                                  cwd=self.sources_path,
                                  stdout=out_stream,
                                  stderr=err_stream)
        except Exception as e:
            LOGGER.error("Cannot build {} project using godep".format(self.name))
            raise e
    LOGGER.info("Building {} project using godep has been finished".format(self.name))
def build(self):
    """Build the project by running its `pack.sh` script in the sources
    directory, appending output to the build and error logs.

    Raises:
        Exception: re-raised after logging when pack.sh fails.
    """
    LOGGER.info('Building %s project', self.name)
    pack_cmd = ['sh', 'pack.sh']
    with open(self.build_log_path, 'a') as out_stream, \
            open(self.err_log_path, 'a') as err_stream:
        try:
            subprocess.check_call(pack_cmd,
                                  cwd=self.sources_path,
                                  stdout=out_stream,
                                  stderr=err_stream)
        except Exception as e:
            LOGGER.error('Cannot build {} project'.format(self.name))
            raise e
    LOGGER.info('Building {} project has been finished'.format(self.name))
def create(db, send_id, rcv_id, message_title, message_txt, type, status):
    """Insert a message_info row stamped with the current local time.

    Arguments:
        db: database wrapper exposing execute_lastrowid(sql, *params).
        send_id / rcv_id: sender and recipient user ids.
        message_title / message_txt: message content.
        type: message type code (name kept for interface compatibility,
            even though it shadows the builtin).
        status: initial message status.

    Returns:
        "OK" on success, "KO" when no row id was produced.
    """
    current_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    LOGGER.debug("------------------" + str(current_time))
    # Parameterized insert: the original %-interpolated the title and body
    # into the SQL string, so any quote in a message broke the query and
    # opened an SQL injection hole.
    strSql = ("insert into message_info (send_id, rcv_id, message_title, "
              "message_txt, type, post_time, status) "
              "values(%s, %s, %s, %s, %s, %s, %s)")
    LOGGER.debug(strSql)
    last_rowid = db.execute_lastrowid(strSql, send_id, rcv_id, message_title,
                                      message_txt, type, current_time, status)
    if not last_rowid:
        return "KO"
    LOGGER.debug(last_rowid)
    return "OK"
def download_release_zip(self, dest_path):
    """Download the project's pre-built release zip via wget.

    Arguments:
        dest_path: directory the '<name>.zip' file is written into.

    Raises:
        Exception: re-raised after logging when the download fails.
    """
    if not self.url:
        LOGGER.error('Not specified release url for %s', self.name)
        # NOTE(review): raising a plain string is itself a TypeError in
        # modern Python — this should raise a real exception instance.
        raise 'Not specified release url for {}'.format(self.name)
    LOGGER.info('Downloading release package for %s from %s', self.name,
                self.url)
    with open(self.build_log_path, 'a') as build_log, \
            open(self.err_log_path, 'a') as err_log:
        try:
            subprocess.check_call([
                'wget', '-O',
                os.path.join(dest_path, '{}.zip'.format(self.name)), self.url
            ],
                                  stdout=build_log,
                                  stderr=err_log)
        except Exception as e:
            LOGGER.error('Cannot download release package for %s project',
                         self.name)
            raise e
    LOGGER.info('Release package has been downloaded for %s project',
                self.name)
def build(self):
    """Compile the Go project with `godep go build ./...`.

    stdout/stderr are appended to the project's build and error log files.

    Raises:
        Exception: re-raised after logging when the build command fails.
    """
    LOGGER.info('Building {} project using godep'.format(self.name))
    with open(self.build_log_path, 'a') as build_log, \
            open(self.err_log_path, 'a') as err_log:
        try:
            subprocess.check_call(['godep', 'go', 'build', './...'],
                                  cwd=self.sources_path,
                                  stdout=build_log,
                                  stderr=err_log)
        except Exception as e:
            LOGGER.error('Cannot build {} project using godep'.format(
                self.name))
            raise e
    LOGGER.info('Building {} project using godep has been finished'.format(
        self.name))
def _download_tar_file(self, tar_name, version, dest_tar_path):
    """Download the project tar archive for a given ATK version.

    Arguments:
        tar_name: archive file name fetched from the 'binaries' folder.
        version: release number, or the "latest" marker.
        dest_tar_path: local path the archive is written to (also cached
            on ``self._local_tar_path``).

    Raises:
        requests.exceptions.RequestException: download failed or the
            server returned an HTTP error status.
        IOError: archive could not be written to disk.
    """
    LOGGER.info('Downloading {} in version {} for {} project'.format(tar_name, version, self.name))
    self._local_tar_path = dest_tar_path
    if version.lower() == LATEST_ATK_VERSION:
        # "latest" resolves its concrete release number from the catalog.
        self._version_in_manifest = self._versions_catalog[LATEST_ATK_VERSION]['release']
        catalog_name_in_path = LATEST_ATK_VERSION
    else:
        self._version_in_manifest = version
        catalog_name_in_path = self._get_catalog_name_by_release_number(version)
    download_url = os.path.join(self.url, catalog_name_in_path, 'binaries', tar_name)
    try:
        response = requests.get(download_url)
        # Fail fast on HTTP errors (404/500) instead of silently writing
        # the error page into the tar file; HTTPError is a subclass of
        # RequestException, so existing callers are unaffected.
        response.raise_for_status()
        with open(dest_tar_path, 'wb') as tar:
            tar.write(response.content)
    except requests.exceptions.RequestException as e:
        LOGGER.error('Cannot download {} tar archive for {} project.'.format(tar_name, self.name))
        raise e
    except IOError as e:
        LOGGER.error('Cannot save {} tar archive on your hard disk.'.format(tar_name))
        raise e
    LOGGER.info('Tar archive for {} app from {} in version {} has been downloaded'
                .format(self.name, download_url, version))
def create_zip_package(self, dest_path, zip_name=None, zip_items=None):
    """Zip selected items from the project sources, preserving symlinks.

    Arguments:
        dest_path: directory for the zip file (created if missing; an
            existing zip of the same name is removed first).
        zip_name: output file name; defaults to ``self.zip_name``.
        zip_items: paths (relative to the sources dir) to include;
            defaults to ``self.zip_items``. Directories are walked
            recursively; symlinks are stored as links, not their targets.

    Raises:
        Exception: re-raised after logging on any failure.
    """
    zip_name = zip_name if zip_name else self.zip_name
    LOGGER.info('Creating {} package for {} project'.format(zip_name, self.name))
    try:
        if not os.path.exists(dest_path):
            os.makedirs(dest_path)
        if os.path.exists(os.path.join(dest_path, zip_name)):
            os.remove(os.path.join(dest_path, zip_name))
        zip_package = zipfile.ZipFile(os.path.join(dest_path, zip_name), 'w')
        zip_items = zip_items if zip_items else self.zip_items
        if zip_items:
            zip_items_abs_paths = []
            for item in zip_items:
                zip_items_abs_paths.append(os.path.join(self.sources_path, item))
            for item in zip_items_abs_paths:
                if os.path.isdir(item):
                    for root, dirs, files in os.walk(item):
                        for file in files:
                            if os.path.islink(os.path.join(root, file)):
                                # Store the link itself: write a ZipInfo
                                # entry whose payload is the link target.
                                link_dest = os.readlink(os.path.join(root, file))
                                attr = zipfile.ZipInfo()
                                attr.filename = os.path.relpath(os.path.join(root, file), self.sources_path)
                                attr.create_system = 3  # local system code
                                # Unix mode bits marking the entry a symlink
                                # (0xA1ED0000 shifted into external_attr).
                                attr.external_attr = 2716663808L  # symlink magic number
                                zip_package.writestr(attr, link_dest)
                            else:
                                zip_package.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), self.sources_path))
                else:
                    # Single file entry, stored relative to the sources dir.
                    zip_package.write(item, os.path.relpath(item, self.sources_path))
        zip_package.close()
    except Exception as e:
        LOGGER.error('Cannot create zip package {} for {} project'.format(zip_name, self.name))
        raise e
    LOGGER.info('Package for {} project has been created'.format(self.name))
def main():
    """Entry point: parse args, queue all apps, build them on worker
    threads, write the refs summary, and exit non-zero on any failure."""
    global tools_output_path, apps_output_path, files_output_path, release_tag, atk_version, destination_path, refs_summary
    refs_summary = dict()
    args = parse_args()
    input_refs_file = dict()
    # Optional refs.txt pins specific git refs: "name ref" per line.
    if args.refs_txt:
        try:
            with open(args.refs_txt, 'r') as stream:
                refs_txt_content = stream.read().split('\n')
                for item in refs_txt_content:
                    item = item.split()
                    if len(item):
                        input_refs_file[item[0]] = item[1]
        except Exception:
            LOGGER.error('Cannot open refs.txt file.')
    # Command-line "name:ref" pins override/extend the refs file.
    if args.spec_version:
        for ver in args.spec_version:
            item = ver.split(':')
            input_refs_file[item[0]] = item[1]
    projects_names = load_app_yaml(constants.APPS_YAML_FILE_PATH)
    for app in projects_names['applications']:
        if 'snapshot' not in app:
            app['snapshot'] = input_refs_file[app['name']] if app['name'] in input_refs_file else None
        apps_queue.put(app)
    destination_path = args.destination if args.destination else constants.DEFAULT_DESTINATION_PATH
    tools_output_path = os.path.join(destination_path, 'tools')
    apps_output_path = os.path.join(destination_path, 'apps')
    files_output_path = os.path.join(destination_path, 'files')
    release_tag = args.release_tag if args.release_tag else None
    atk_version = args.atk_version if args.atk_version else constants.DEFAULT_ATK_VERSION
    if not os.path.exists(tools_output_path):
        os.makedirs(tools_output_path)
    if not os.path.exists(apps_output_path):
        os.makedirs(apps_output_path)
    if not os.path.exists(files_output_path):
        os.makedirs(files_output_path)
    # One worker thread per CPU core; the queue is filled before start.
    for i in range(constants.CPU_CORES_COUNT):
        threads.append(threading.Thread(target=build_sources))
        threads[i].start()
    for i in range(constants.CPU_CORES_COUNT):
        threads[i].join()
    # Persist which git ref each project was built from.
    with open(os.path.join(files_output_path, 'refs.txt'), 'w') as ref_file:
        for key, value in refs_summary.iteritems():
            ref_file.write('{} {}\n'.format(key, value))
    if fails:
        LOGGER.error('Cannot build platform packages!')
        for app_name in fails:
            LOGGER.error('%s project failed.', app_name)
        sys.exit(1)
    else:
        run_apployer_expand()
def download_project_sources(self, snapshot=None, url=None):
    """Clone or update the project's git sources, optionally checking out
    a pinned ref, and record the resulting HEAD commit on ``self.ref``.

    Arguments:
        snapshot: git ref (tag/branch/sha) to check out; instance value
            wins over the argument.
        url: git repository url; instance value wins over the argument.

    Raises:
        Exception: re-raised after logging when clone/update fails (a
            failed snapshot checkout only logs a warning and keeps master).
    """
    # Instance attributes take precedence; arguments are fallbacks.
    self.snapshot = self.snapshot if self.snapshot else snapshot
    self.url = self.url if self.url else url
    with open(self.build_log_path, 'a') as build_log, \
            open(self.err_log_path, 'a') as err_log:
        if os.path.exists(self.sources_path):
            # Existing checkout: return to master and pull latest.
            LOGGER.info('Updating sources for {} project'.format(self.name))
            try:
                subprocess.check_call(['git', 'checkout', 'master'], cwd=self.sources_path, stdout=build_log, stderr=err_log)
                subprocess.check_call(['git', 'pull'], cwd=self.sources_path, stdout=build_log, stderr=err_log)
            except Exception as e:
                LOGGER.error('Cannot update sources for {} project'.format(self.name))
                raise e
            LOGGER.info('Sources for {} project has been updated'.format(self.name))
        else:
            # Fresh clone into the platform parent directory.
            LOGGER.info('Downloading {} project sources'.format(self.name))
            try:
                subprocess.check_call(['git', 'clone', self.url], cwd=PLATFORM_PARENT_PATH, stdout=build_log, stderr=err_log)
            except Exception as e:
                LOGGER.error('Cannot download sources for {} project'.format(self.name))
                raise e
            LOGGER.info('Sources for {} project has been downloaded'.format(self.name))
        if self.snapshot:
            LOGGER.info('Setting release tag {} for {} project sources'.format(self.snapshot, self.name))
            try:
                subprocess.check_call(['git', 'checkout', self.snapshot], cwd=self.sources_path, stdout=build_log, stderr=err_log)
            except Exception:
                # Best effort: a missing tag falls back to master.
                LOGGER.warning('Cannot set release tag {} for {} project sources. Using "master" branch.'.format(self.snapshot, self.name))
    # Remember exactly which commit the build will use.
    self.ref = subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=self.sources_path).rstrip()
def __setitem__(self, index, value):
    """Log and apply an `led[i] = colour` style assignment."""
    LOGGER.info("Setting LED %s to %s", index, value)
    # Delegate the actual storage to the parent sequence type.
    super().__setitem__(index, value)
#_*_ coding: utf-8 _*_ """Move all done cards to archive list in prior sprint board. Create new sprint board and move necessary lists from prior board to new board""" import sys sys.path.append('./lib') from lib.logger import LOGGER from lib.config import DOTTED_LINE, TEAM_INFO, DONE_LIST_NAME, ADMIN_USERS, BOARD_LISTS from lib.utils import compute_sprint_n, get_board_name, get_board_id, get_list_id, \ get_the_number_of_card, create_list, move_all_cards, create_board, \ get_labels_data, update_board_label, update_board_member, move_list, \ get_members_data, get_archive_name LOGGER.debug(DOTTED_LINE) LOGGER.info("Start creating new Sprint board for each team") for team in TEAM_INFO: team_info = TEAM_INFO[team] organ_name = team_info['organ_name'] start_ym = team_info['start_ym'] sprint_n = compute_sprint_n(start_ym) board_name = get_board_name(sprint_n, last_month=True) bid = get_board_id(organ_name, board_name) if bid: # move done cards in last month board done_list_id = get_list_id(bid, DONE_LIST_NAME) if done_list_id: n_card = get_the_number_of_card(done_list_id)
def main():
    """Entry point: parse args, queue all apps, build them on worker
    threads, write the refs summary, and exit non-zero on any failure."""
    global tools_output_path, apps_output_path, files_output_path, release_tag, atk_version, destination_path, refs_summary
    refs_summary = dict()
    args = parse_args()
    input_refs_file = dict()
    # Optional refs.txt pins specific git refs: "name ref" per line.
    if args.refs_txt:
        try:
            with open(args.refs_txt, 'r') as stream:
                refs_txt_content = stream.read().split('\n')
                for item in refs_txt_content:
                    item = item.split()
                    if len(item):
                        input_refs_file[item[0]] = item[1]
        except Exception:
            LOGGER.error('Cannot open refs.txt file.')
    # Command-line "name:ref" pins override/extend the refs file.
    if args.spec_version:
        for ver in args.spec_version:
            item = ver.split(':')
            input_refs_file[item[0]] = item[1]
    projects_names = load_app_yaml(constants.APPS_YAML_FILE_PATH)
    for app in projects_names['applications']:
        if 'snapshot' not in app:
            app['snapshot'] = input_refs_file[
                app['name']] if app['name'] in input_refs_file else None
        apps_queue.put(app)
    destination_path = args.destination if args.destination else constants.DEFAULT_DESTINATION_PATH
    tools_output_path = os.path.join(destination_path, 'tools')
    apps_output_path = os.path.join(destination_path, 'apps')
    files_output_path = os.path.join(destination_path, 'files')
    release_tag = args.release_tag if args.release_tag else None
    atk_version = args.atk_version if args.atk_version else constants.DEFAULT_ATK_VERSION
    if not os.path.exists(tools_output_path):
        os.makedirs(tools_output_path)
    if not os.path.exists(apps_output_path):
        os.makedirs(apps_output_path)
    if not os.path.exists(files_output_path):
        os.makedirs(files_output_path)
    # One worker thread per CPU core; the queue is filled before start.
    for i in range(constants.CPU_CORES_COUNT):
        threads.append(threading.Thread(target=build_sources))
        threads[i].start()
    for i in range(constants.CPU_CORES_COUNT):
        threads[i].join()
    # Persist which git ref each project was built from.
    with open(os.path.join(files_output_path, 'refs.txt'), 'w') as ref_file:
        for key, value in refs_summary.iteritems():
            ref_file.write('{} {}\n'.format(key, value))
    if fails:
        LOGGER.error('Cannot build platform packages!')
        for app_name in fails:
            LOGGER.error('%s project failed.', app_name)
        sys.exit(1)
    else:
        run_apployer_expand()
def _get_catalog_name_by_release_number(self, release_number):
    """Map a numeric release number onto its versions-catalog key.

    Returns the first catalog key whose 'release' equals *release_number*;
    logs a warning and falls back to ``LATEST_ATK_VERSION`` when no entry
    matches.
    """
    for catalog_name, catalog_entry in self._versions_catalog.iteritems():
        if catalog_entry['release'] == int(release_number):
            return catalog_name
    LOGGER.warning('Unknown release number for {} project. Latest version will be used.'.format(self.name))
    return LATEST_ATK_VERSION
def main():
    """Parse command-line options and run the Tornado HTTP server forever."""
    LOGGER.info("Start....")
    tornado.options.parse_command_line()
    application = Application()
    server = tornado.httpserver.HTTPServer(application)
    server.listen(options.port)
    # Blocks until the process is stopped.
    tornado.ioloop.IOLoop.instance().start()
def fill(self, colour):
    """Set every simulated pixel to *colour* (logged, not rendered)."""
    LOGGER.info("Filling with %s", colour)
    # Assign through __setitem__ so per-pixel logging still fires.
    for position, _unused in enumerate(self):
        self[position] = colour
#_*_ coding: utf-8 _*_ """Move all cards from done list to archive list""" import sys sys.path.append('./lib') from lib.config import DOTTED_LINE, TEAM_INFO, DONE_LIST_NAME from lib.logger import LOGGER from lib.utils import get_board_name, get_board_id, get_list_id, get_the_number_of_card, \ create_list, get_archive_name, move_all_cards, compute_sprint_n LOGGER.debug(DOTTED_LINE) LOGGER.info("Start moving done-list cards to archive-list") for team in TEAM_INFO: team_info = TEAM_INFO[team] start_ym = team_info['start_ym'] organ_name = team_info['organ_name'] sprint_n = compute_sprint_n(start_ym) board_name = get_board_name(sprint_n) bid = get_board_id(organ_name, board_name) if bid: done_list_id = get_list_id(bid, DONE_LIST_NAME) n_card = get_the_number_of_card(done_list_id) if done_list_id and n_card > 0: archive_list_name = get_archive_name() archive_list_id, existance = create_list(bid, archive_list_name) if existance: LOGGER.info("List(" + archive_list_name + ") is already in " + board_name) else: