def check_file(args):
    """Return the `<file>` argument if it points at an existing file.

    Exits the process through Logger.error(sysexit=True) when the file
    cannot be found.
    """
    path = args["<file>"]
    if not os.path.isfile(path):
        Logger.error("File does not exist: %s" % path, sysexit=True)
        return
    return path
def default_export(req):
    """Fetch the export request, waiting while BGG reports it as queued.

    BGG answers 202 while the export is being prepared; poll until a
    definitive status arrives. 200 returns the response; anything else
    dumps the body to ERROR_FILE_PATH and raises.
    """
    while True:
        response = urllib2.urlopen(req)
        if response.code != 202:
            break
        Logger.info('Export is queued, will retry in %ss' % EXPORT_QUERY_INTERVAL)
        time.sleep(EXPORT_QUERY_INTERVAL)
    if response.code == 200:
        return response
    # Unexpected status: keep the body on disk so the user can inspect it.
    try:
        with open(ERROR_FILE_PATH, "wb") as error_file:
            error_file.write(response.read())
        Logger.error("Unexpected response, content has been written in %s" % ERROR_FILE_PATH)
    except Exception as e:
        raise Exception(
            'Unexpected HTTP response for export request, and cannot write '
            'response content in %s: %s' % (ERROR_FILE_PATH, e))
    raise Exception(
        'Unexpected HTTP response for export request, response content written in '
        '%s' % ERROR_FILE_PATH)
def execute(args, options):
    """Delete every game listed in the CSV file from the user's collection.

    Asks for an interactive confirmation (unless --force) before driving
    the browser through the deletion of each row.
    """
    login = args['--login']
    reader = CsvReader(check_file(args))
    reader.open()
    game_count = reader.rowCount
    if not args['--force']:
        # Safety net: the user must re-type the exact number of games.
        sys.stdout.write(
            "You are about to delete %s games in you collection (%s), "
            "please enter the number of games displayed here to confirm you want to continue: "
            % (game_count, login))
        if raw_input() != str(game_count):
            Logger.error('Operation canceled, number does not match (should be %s).' % game_count,
                         sysexit=True)
            return
    Logger.info("Deleting games for '%s' account..." % login)
    with WebDriver('collection-delete', args, options) as web_driver:
        login_page = LoginPage(web_driver.driver)
        if not login_page.authenticate(login, args['--password']):
            sys.exit(1)
        Logger.info("Deleting %s games..." % game_count)
        game_page = GamePage(web_driver.driver)
        reader.iterate(lambda entry: game_page.delete(entry))
        Logger.info("Deletion has finished.")
def execute(args, options):
    """Delete the games listed in the CSV file from the given BGG account."""
    login = args['--login']
    file_path = check_file(args)
    csv_reader = CsvReader(file_path)
    csv_reader.open()
    game_count = csv_reader.rowCount

    def confirmed():
        # The user must re-type the number of games as a safety check.
        sys.stdout.write(
            "You are about to delete %s games in you collection (%s), "
            "please enter the number of games displayed here to confirm you want to continue: "
            % (game_count, login))
        return raw_input() == str(game_count)

    # Short-circuit keeps the prompt away when --force is given.
    if not args['--force'] and not confirmed():
        Logger.error(
            'Operation canceled, number does not match (should be %s).' % game_count,
            sysexit=True)
        return
    Logger.info("Deleting games for '%s' account..." % login)
    with WebDriver('collection-delete', args, options) as web_driver:
        if not LoginPage(web_driver.driver).authenticate(login, args['--password']):
            sys.exit(1)
        Logger.info("Deleting %s games..." % game_count)
        game_page = GamePage(web_driver.driver)
        csv_reader.iterate(game_page.delete)
        Logger.info("Deletion has finished.")
def authenticate(self, login, password):
    """
    Performs authentication

    :param login: BGG login
    :param password: BGG password
    :return: True when the session ends up authenticated, False otherwise
    """
    Logger.info("Authenticating...", break_line=False)
    self.driver.get("%s/login" % BGG_BASE_URL)
    # When user is already authenticated, just skip this task
    # TODO Handle case where another user is logged in
    if self.is_authenticated(login):
        Logger.info(" (already logged) [done]", append=True)
        return True
    # Fill in the login form fields, then submit it.
    self.update_text(self.driver.find_element_by_id("login_username"), login)
    self.update_text(self.driver.find_element_by_id("login_password"), password)
    self.driver.find_element_by_xpath("//div[@class='menu_login']//input[@type='submit']")\
        .click()
    # Re-check the page state to confirm that the submit worked.
    if self.is_authenticated(login):
        Logger.info(" [done]", append=True)
        return True
    Logger.info(" [error]", append=True)
    Logger.error("Authentication failed, check your credentials!")
    return False
def check_file(args):
    """Validate the `<file>` docopt argument and return its path.

    A missing file aborts the process via Logger.error(sysexit=True).
    """
    candidate = args['<file>']
    if os.path.isfile(candidate):
        return candidate
    Logger.error("File does not exist: %s" % candidate, sysexit=True)
def update_collid(opener, collid, fieldname, values):
    """POST a 'savedata' update for one field group of a collection item.

    :param opener: urllib2 opener carrying the authenticated session cookies
    :param collid: id of the collection item to update
    :param fieldname: BGG field group (e.g. 'status', 'ownership', 'version')
    :param values: field values to save; left untouched by this call
    """
    # Bug fix: previously this mutated the caller's dict via values.update();
    # build the payload on a copy instead.
    payload = dict(values)
    payload.update({'ajax': 1, 'action': 'savedata', 'collid': collid,
                    'fieldname': fieldname})
    # BGG expects UTF-8 encoded form data (Python 2 text handling).
    payload = {k: unicode(v).encode('utf-8') for k, v in payload.iteritems()}
    response = opener.open(BGG_BASE_URL + '/geekcollection.php', urlencode(payload))
    if response.code != 200:
        Logger.error("Failed to update 'collid'=%s!" % collid, sysexit=True)
def execute(args, options):
    """Export the BGG collection of an account into a CSV file.

    :param args: docopt arguments ('--login', '--password', '<file>')
    :param options: command options; 'save-xml-file' == 'true' keeps the raw XML
    """
    login = args['--login']
    dest_path = args['<file>']
    Logger.info("Exporting collection for '%s' account..." % login)
    # 1. Authentication
    # NOTE(review): the browser is only used to obtain the session cookie;
    # the export itself is a plain HTTP call below.
    with WebDriver('collection-export', args, options) as web_driver:
        if not LoginPage(web_driver.driver).authenticate(
                login, args['--password']):
            sys.exit(1)
        auth_cookie = web_driver.driver.get_cookie(
            BGG_SESSION_COOKIE_NAME)['value']
    # 2. Export
    # Easier to rely on a client HTTP call rather than Selenium to download a file
    # Just need to pass the session cookie to get the full export with private information
    # Use XML2 API, see https://www.boardgamegeek.com/wiki/page/BGG_XML_API2#Collection
    # Default CSV export doesn't provide version info!
    url = '%s/xmlapi2/collection?username=%s&version=1&showprivate=1&stats=1' \
        % (BGG_BASE_URL, login)
    req = urllib2.Request(
        url, None,
        {'Cookie': '%s=%s' % (BGG_SESSION_COOKIE_NAME, auth_cookie)})
    # Get a BadStatusLine error most of times without this delay!
    # Related to Selenium, but in some conditions that I have not identified
    time.sleep(8)
    try:
        Logger.info('Launching export...')
        response = default_export(req)
    except Exception as e:
        Logger.error('Error while fetching export file!', e, sysexit=True)
        return
    # 3. Store XML file if requested
    xml_file = options.get('save-xml-file')
    if xml_file == 'true':
        xml_file_path = write_xml_file(response, dest_path)
        Logger.info("XML file save as %s" % xml_file_path)
        source = open(xml_file_path, 'rU')
    else:
        # No XML copy requested: stream straight from the HTTP response.
        source = response
    # 4. Write CSV file
    try:
        write_csv(source, dest_path)
    except Exception as e:
        Logger.error('Error while writing export file in file system!', e, sysexit=True)
        return
    finally:
        source.close()
    # End
    Logger.info("Collection has been exported as %s" % dest_path)
def check(self):
    """Validate the CSV header: require 'objectid', warn on unsupported columns."""
    fields = self.reader.fieldnames
    if 'objectid' not in fields:
        # Without 'objectid' rows cannot be matched against BGG items.
        Logger.error("Cannot process the CSV file, it should contain at least a column named "
                     "'objectid'! Provided columns: %s" % fields, sysexit=True)
        return
    extra = set(fields) - set(BGG_SUPPORTED_FIELDS)
    if extra:
        Logger.info('Some fields are not supported in your CSV file, they will be skipped: %s'
                    % extra)
def execute(args, options):
    """Export the collection of a BGG account to a CSV file (Selenium auth + HTTP export).

    :param args: docopt arguments ('--login', '--password', '<file>')
    :param options: command options; set 'save-xml-file' to 'true' to keep the XML
    """
    login = args['--login']
    dest_path = args['<file>']
    Logger.info("Exporting collection for '%s' account..." % login)
    # 1. Authentication
    # NOTE(review): Selenium is only needed to grab the session cookie here.
    with WebDriver('collection-export', args, options) as web_driver:
        if not LoginPage(web_driver.driver).authenticate(login, args['--password']):
            sys.exit(1)
        auth_cookie = web_driver.driver.get_cookie(BGG_SESSION_COOKIE_NAME)['value']
    # 2. Export
    # Easier to rely on a client HTTP call rather than Selenium to download a file
    # Just need to pass the session cookie to get the full export with private information
    # Use XML2 API, see https://www.boardgamegeek.com/wiki/page/BGG_XML_API2#Collection
    # Default CSV export doesn't provide version info!
    url = '%s/xmlapi2/collection?username=%s&version=1&showprivate=1&stats=1' \
        % (BGG_BASE_URL, login)
    req = urllib2.Request(url, None,
                          {'Cookie': '%s=%s' % (BGG_SESSION_COOKIE_NAME, auth_cookie)})
    # Get a BadStatusLine error most of times without this delay!
    # Related to Selenium, but in some conditions that I have not identified
    time.sleep(8)
    try:
        Logger.info('Launching export...')
        response = default_export(req)
    except Exception as e:
        Logger.error('Error while fetching export file!', e, sysexit=True)
        return
    # 3. Store XML file if requested
    xml_file = options.get('save-xml-file')
    if xml_file == 'true':
        xml_file_path = write_xml_file(response, dest_path)
        Logger.info("XML file save as %s" % xml_file_path)
        source = open(xml_file_path, 'rU')
    else:
        # Otherwise the CSV writer consumes the HTTP response directly.
        source = response
    # 4. Write CSV file
    try:
        write_csv(source, dest_path)
    except Exception as e:
        Logger.error('Error while writing export file in file system!', e, sysexit=True)
        return
    finally:
        source.close()
    # End
    Logger.info("Collection has been exported as %s" % dest_path)
def check(self):
    """Ensure the CSV header is usable before processing any rows."""
    if 'objectid' not in self.reader.fieldnames:
        Logger.error(
            "Cannot process the CSV file, it should contain at least a column named "
            "'objectid'! Provided columns: %s" % self.reader.fieldnames,
            sysexit=True)
        return
    # Columns BGG does not know about are reported once, then ignored.
    unsupported = set(self.reader.fieldnames).difference(BGG_SUPPORTED_FIELDS)
    if unsupported:
        Logger.info(
            'Some fields are not supported in your CSV file, they will be skipped: %s'
            % unsupported)
def game_deleter(opener, row):
    """Delete one collection item (row['collid']) through the BGG ajax endpoint."""
    collid = row['collid']
    if not collid:
        # Rows without a collection id have nothing to delete.
        return
    payload = urlencode({'ajax': 1, 'action': 'delete', 'collid': collid})
    response = opener.open(BGG_BASE_URL + '/geekcollection.php', payload)
    if response.code != 200:
        Logger.error("Failed to delete 'collid'=%s!" % collid, sysexit=True)
def game_importer(opener, row, force_new=False):
    """Create or update one collection item on BGG from a CSV row.

    :param opener: urllib2 opener carrying the authenticated session
    :param row: dict of CSV column name -> value for one game
    :param force_new: when True, always create a new collection item even if
        the row already carries a 'collid'
    """
    collid = row['collid']
    if force_new or not collid:
        # No existing item (or creation forced): create one from the objectid.
        objectid = row['objectid']
        if not objectid.isdigit():
            Logger.error("Invalid 'objectid'=%s!" % objectid, sysexit=True)
        collid = create_collid(opener, objectid)
    # Status flags are saved together as the 'status' field group.
    values = {k: v for k, v in row.iteritems() if k in [
        'own', 'prevowned', 'fortrade', 'want', 'wanttobuy', 'wishlist',
        'wishlistpriority', 'wanttoplay', 'preordered']}
    if len(values):
        update_collid(opener, collid, 'status', values)
    # Private ownership details form the 'ownership' field group.
    values = {k: v for k, v in row.iteritems() if k in [
        'pp_currency', 'pricepaid', 'cv_currency', 'currvalue', 'quantity',
        'acquisitiondate', 'acquiredfrom', 'privatecomment', 'invdate',
        'invlocation']}
    if len(values):
        update_collid(opener, collid, 'ownership', values)
    # Version info: prefer an explicit '_versionid'; otherwise a row already
    # linked to a version ('geekitem_version' == 1); otherwise loose fields.
    if '_versionid' in row.keys() and row['_versionid'].isdigit():
        update_collid(opener, collid, 'version',
                      {'geekitem_version': 1, 'objectid': row['_versionid']})
    elif 'geekitem_version' in row.keys() and 'objectid' in row.keys() and \
            row['geekitem_version'].isdigit() and int(row['geekitem_version']) == 1:
        update_collid(opener, collid, 'version',
                      {'geekitem_version': 1, 'objectid': row['objectid']})
    else:
        values = {k: v for k, v in row.iteritems() if k in [
            'imageid', 'publisherid', 'languageid', 'year', 'other', 'barcode']}
        if len(values):
            update_collid(opener, collid, 'version', values)
    # Remaining single-value fields are saved one HTTP call each.
    if 'objectname' in row.keys():
        update_collid(opener, collid, 'objectname', {'value': row['objectname']})
    if 'rating' in row.keys():
        update_collid(opener, collid, 'rating', {'rating': row['rating']})
    if 'weight' in row.keys():
        update_collid(opener, collid, 'weight', {'weight': row['weight']})
    if 'comment' in row.keys():
        update_collid(opener, collid, 'comment', {'value': row['comment']})
    if 'conditiontext' in row.keys():
        update_collid(opener, collid, 'conditiontext', {'value': row['conditiontext']})
    if 'wantpartslist' in row.keys():
        update_collid(opener, collid, 'wantpartslist', {'value': row['wantpartslist']})
    if 'haspartslist' in row.keys():
        update_collid(opener, collid, 'haspartslist', {'value': row['haspartslist']})
    if 'wishlistcomment' in row.keys():
        update_collid(opener, collid, 'wishlistcomment', {'value': row['wishlistcomment']})
def execute_command(command, argv):
    """Resolve the sub-command module by name, run it, and report its duration."""
    timer_start = time.time()
    try:
        module = import_command_module(command)
        cmd_args, cmd_options = parse_commad_args(module, argv)
        if cmd_args:
            module.execute(cmd_args, cmd_options)
            show_duration(timer_start)
    except ImportError:
        # The module lookup failed: the command name is unknown.
        exit_unknown_command(command)
    except WebDriverException as e:
        Logger.error(UI_ERROR_MSG, e)
    except Exception as e:
        Logger.error("Encountered an unexpected error, please report the issue to the author", e)
def create_collid(opener, objectid):
    """Create a collection item for `objectid` on BGG and return its new collid."""
    add_params = urlencode({'ajax': 1, 'action': 'additem', 'force': 'true',
                            'objecttype': 'thing', 'objectid': objectid})
    response = opener.open(BGG_BASE_URL + '/geekcollection.php', add_params)
    if response.code != 200:
        Logger.error("Failed to create item of 'objectid'=%s!" % objectid, sysexit=True)
    # There seems to be no straightforward way to get the collid of the item just created.
    # To work around this we fetch a list of all items of this objectid and scrape it to
    # find the largest collid. This might fail if the collection is concurrently modified.
    list_params = urlencode({'ajax': 1, 'action': 'module',
                             'objecttype': 'thing', 'objectid': objectid})
    response = opener.open(BGG_BASE_URL + '/geekcollection.php?' + list_params)
    matches = re.finditer(
        r"(?i)<input\s+type='hidden'\s+name='collid'\s+value='(\d+)'[^>]*>",
        response.read())
    return max(int(match.group(1)) for match in matches)
def execute(args):
    """Delete all games listed in the CSV file using plain HTTP calls (no Selenium)."""
    login = args['--login']
    csv_reader = CsvReader(check_file(args))
    csv_reader.open()
    game_count = csv_reader.rowCount
    if not args['--force']:
        # Require the user to re-type the number of games before destroying data.
        sys.stdout.write(
            "You are about to delete %s games in you collection (%s), "
            "please enter the number of games displayed here to confirm you want to continue: "
            % (game_count, login))
        if raw_input() != str(game_count):
            Logger.error('Operation canceled, number does not match (should be %s).' % game_count,
                         sysexit=True)
            return
    Logger.info("Deleting games for '%s' account..." % login)
    # Authenticate with a cookie-aware opener; BGG sets 'bggusername' on success.
    jar = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
    Logger.info("Authenticating...", break_line=False)
    opener.open(BGG_BASE_URL + '/login', urlencode({
        'action': 'login', 'username': login, 'password': args['--password']}))
    if not any(cookie.name == "bggusername" for cookie in jar):
        Logger.info(" [error]", append=True)
        Logger.error("Authentication failed for user '%s'!" % login, sysexit=True)
    Logger.info(" [done]", append=True)
    Logger.info("Deleting %s games..." % game_count)
    csv_reader.iterate(lambda row: game_deleter(opener, row))
    Logger.info("Deletion has finished.")
def iterate(self, callback):
    """Iterate over the CSV rows, invoking callback(row) for each valid one.

    Each row must carry a numeric 'collid', or an empty 'collid' backed by a
    numeric 'objectid'. Values are decoded from UTF-8 before the callback.
    Any failure aborts the run via Logger.error(sysexit=True).

    :param callback: callable taking one decoded row dict
    """
    try:
        index = 1
        for row in self.reader:
            objectid = row.get('objectid')
            collid = row.get('collid')
            # Bug fix: guard objectid against None before .isdigit(), which
            # previously raised AttributeError when collid == "" and the
            # 'objectid' column was missing.
            valid = collid is not None and (
                collid.isdigit()
                or (collid == "" and objectid is not None and objectid.isdigit()))
            if not valid:
                Logger.error("No valid 'collid'/'objectid' at line %s!" % index, None,
                             sysexit=True)
                return
            # Decode in UTF-8 (Python 2 str -> unicode) for downstream code.
            for key in row:
                value = row[key]
                if value is not None:
                    row[key] = unicode(value, 'utf-8')
            objectname = row['objectname']
            if objectname is None or objectname == "":
                objectname = "(name not available for objectid=%s, collid=%s)" % (
                    objectid, collid)
            Logger.info("[%s/%s] %s... " % (index, self.rowCount, objectname),
                        break_line=False)
            try:
                callback(row)
            except URLError as e:
                Logger.error(UI_ERROR_MSG, e, sysexit=True)
                return
            except Exception as e:
                Logger.info("", append=True)
                Logger.error("Unexpected error while processing row %s" % index, e,
                             sysexit=True)
                return
            Logger.info(" [done]", append=True)
            index += 1
    except csv.Error as e:
        # Bug fix: the message previously interpolated the builtin 'file'
        # (wrong value on Python 2, NameError on Python 3) instead of a path.
        Logger.error('Error while reading CSV input at line %d: %s'
                     % (self.reader.line_num, e), sysexit=True)
def execute_command(command, argv):
    """Look up the command module by name, parse its args, and run it (timed).

    Unknown commands exit through exit_unknown_command; network failures are
    reported with the generic UI error message; anything else is logged.
    """
    # Cleanup: removed the commented-out leftovers of the Selenium-based
    # variant (parse_commad_args tuple unpacking, WebDriverException handler).
    timer_start = time.time()
    try:
        command_module = import_command_module(command)
        command_args = parse_commad_args(command_module, argv)
        if command_args:
            command_module.execute(command_args)
            show_duration(timer_start)
    except ImportError:
        exit_unknown_command(command)
    except URLError as e:
        Logger.error(UI_ERROR_MSG, e)
    except Exception as e:
        Logger.error("Encountered an unexpected error, please report the issue to the author", e)
def default_export(req):
    """Issue the export request, retrying recursively while BGG queues it (202).

    Returns the HTTP response on 200; any other status dumps the body to
    ERROR_FILE_PATH and raises.
    """
    response = urllib2.urlopen(req)
    status = response.code
    if status == 200:
        return response
    if status == 202:
        # Export not ready yet: wait and ask again.
        Logger.info('Export is queued, will retry in %ss' % EXPORT_QUERY_INTERVAL)
        time.sleep(EXPORT_QUERY_INTERVAL)
        return default_export(req)
    # Write response in a text file otherwise
    try:
        with open(ERROR_FILE_PATH, "wb") as error_file:
            error_file.write(response.read())
        Logger.error("Unexpected response, content has been written in %s" % ERROR_FILE_PATH)
    except Exception as e:
        raise Exception('Unexpected HTTP response for export request, and cannot write '
                        'response content in %s: %s' % (ERROR_FILE_PATH, e))
    raise Exception('Unexpected HTTP response for export request, response content written in '
                    '%s' % ERROR_FILE_PATH)
def execute(args):
    """Export the collection as CSV (and optionally XML) using plain HTTP calls.

    :param args: docopt arguments ('--login', '--password', '<file>',
        '--save-xml-file')
    """
    login = args['--login']
    dest_path = args['<file>']
    Logger.info("Exporting collection for '%s' account..." % login)
    # 1. Authentication
    cj = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    Logger.info("Authenticating...", break_line=False)
    opener.open(BGG_BASE_URL + '/login', urlencode({
        'action': 'login', 'username': login, 'password': args['--password']}))
    if not any(cookie.name == "bggusername" for cookie in cj):
        Logger.info(" [error]", append=True)
        Logger.error("Authentication failed for user '%s'!" % login, sysexit=True)
    Logger.info(" [done]", append=True)
    # Bug fix: default_export() only takes the request object, but was called
    # as default_export(opener, req). Install the cookie-aware opener globally
    # so urllib2.urlopen() inside default_export carries the session cookies.
    urllib2.install_opener(opener)
    # 2. Export
    # Use XML2 API, see https://www.boardgamegeek.com/wiki/page/BGG_XML_API2#Collection
    # Default CSV export doesn't provide version info!
    url = BGG_BASE_URL + '/xmlapi2/collection?' + urlencode({
        'username': login, 'version': 1, 'showprivate': 1, 'stats': 1})
    req = urllib2.Request(url)
    # Get a BadStatusLine error most of times without this delay!
    # Related to Selenium, but in some conditions that I have not identified
    time.sleep(8)
    try:
        Logger.info('Launching export...')
        response = default_export(req)
    except Exception as e:
        Logger.error('Error while fetching export file!', e, sysexit=True)
        return
    # 3. Store XML file if requested
    if args['--save-xml-file']:
        xml_file_path = write_xml_file(response, dest_path)
        Logger.info("XML file save as %s" % xml_file_path)
        source = open(xml_file_path, 'rU')
    else:
        source = response
    # 4. Write CSV file
    try:
        write_csv(source, dest_path)
    except Exception as e:
        Logger.error('Error while writing export file in file system!', e, sysexit=True)
        return
    finally:
        source.close()
    # End
    Logger.info("Collection has been exported as %s" % dest_path)
def iterate(self, callback):
    """Iterate over the CSV rows, invoking callback(row) for each valid one.

    Every row must have a numeric 'objectid'. Values are decoded from UTF-8
    before the callback. Any failure aborts the run via
    Logger.error(sysexit=True).

    :param callback: callable taking one decoded row dict
    """
    try:
        index = 1
        for row in self.reader:
            objectid = row.get('objectid')
            if objectid is None or not objectid.isdigit():
                Logger.error("No valid 'objectid' at line %s!" % index, None, sysexit=True)
                return
            # Decode in UTF-8 (Python 2 str -> unicode) for downstream code.
            for key in row:
                value = row[key]
                if value is not None:
                    row[key] = unicode(value, 'utf-8')
            objectname = row['objectname']
            if objectname is None or objectname == "":
                objectname = "(name not available for objectid=%s)" % objectid
            Logger.info("[%s/%s] %s... " % (index, self.rowCount, objectname),
                        break_line=False)
            try:
                callback(row)
            except WebDriverException as e:
                Logger.error(UI_ERROR_MSG, e, sysexit=True)
                return
            except Exception as e:
                Logger.info("", append=True)
                Logger.error("Unexpected error while processing row %s" % index, e,
                             sysexit=True)
                return
            Logger.info(" [done]", append=True)
            index += 1
    except csv.Error as e:
        # Bug fix: the message previously interpolated the builtin 'file'
        # (wrong value on Python 2, NameError on Python 3) instead of a path.
        Logger.error('Error while reading CSV input at line %d: %s'
                     % (self.reader.line_num, e), sysexit=True)
def exit_error(msg, args):
    """Log an error plus the command usage, then terminate with status 1.

    :param msg: error message to display
    :param args: argv list handed back to docopt to render the usage text
    """
    Logger.error(msg)
    # NOTE(review): relies on docopt(__doc__, args) to produce/print the usage
    # text for these args — confirm docopt does not exit on its own here.
    Logger.error(docopt(__doc__, args))
    exit(1)
# NOTE(review): the three statements below reference 'row', 'opener' and
# 'collid', none of which are defined at this scope — this looks like an
# orphaned duplicate of the tail of game_importer() left behind by an edit;
# confirm and remove.
if 'wantpartslist' in row.keys():
    update_collid(opener, collid, 'wantpartslist', {'value': row['wantpartslist']})
if 'haspartslist' in row.keys():
    update_collid(opener, collid, 'haspartslist', {'value': row['haspartslist']})
if 'wishlistcomment' in row.keys():
    update_collid(opener, collid, 'wishlistcomment', {'value': row['wishlistcomment']})


def execute(args):
    """Import all games from the CSV file into the given BGG account.

    Authenticates over plain HTTP (cookie jar), then feeds every CSV row to
    game_importer(). '--force-new' always creates new collection items.
    """
    login = args['--login']
    file_path = check_file(args)
    csv_reader = CsvReader(file_path)
    csv_reader.open()
    Logger.info("Importing games for '%s' account..." % login)
    # Cookie-aware opener: BGG sets 'bggusername' when the login succeeds.
    cj = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    Logger.info("Authenticating...", break_line=False)
    opener.open(BGG_BASE_URL + '/login', urlencode({
        'action': 'login', 'username': login, 'password': args['--password']}))
    if not any(cookie.name == "bggusername" for cookie in cj):
        Logger.info(" [error]", append=True)
        Logger.error("Authentication failed for user '%s'!" % login, sysexit=True)
    Logger.info(" [done]", append=True)
    Logger.info("Importing %s games..." % csv_reader.rowCount)
    csv_reader.iterate(lambda row: game_importer(opener, row, args['--force-new']))
    Logger.info("Import has finished.")