def run(self, args):
    """Activate the connector named in args[0] and post a Slack notice."""
    _configs, name_to_id, _crawler_ids = get_configs_from_website()
    target = args[0]
    endpoint = '/' + str(name_to_id[target]) + '/activate'
    make_request(endpoint, 'PUT')
    notification = {'title': 'Enable connectors', 'text': '- ' + target}
    send_slack_notif([notification])
def run(self, args):
    """Trigger a reindex of the connector named in args[0], notify Slack."""
    _configs, name_to_id, _crawler_ids = get_configs_from_website()
    target = args[0]
    endpoint = '/' + str(name_to_id[target]) + '/reindex'
    make_request(endpoint, 'POST')
    send_slack_notif([{'title': 'Manually reindexed connectors',
                       'text': '- ' + target}])
    return 0
def run(self, args):
    """Activate the connector named in args[0] and post a Slack notice."""
    # Imports deferred to call time, matching the file's convention.
    from deployer.src.helpers import make_request, send_slack_notif
    from deployer.src.fetchers import get_configs_from_website

    _configs, name_to_id, _crawler_ids = get_configs_from_website()
    target = args[0]
    make_request('/' + str(name_to_id[target]) + '/activate', 'PUT')
    send_slack_notif([{'title': 'Enable connectors', 'text': '- ' + target}])
def run(self, args):
    """Activate the connector named in args[0] and post a Slack notice."""
    # Imports deferred to call time, matching the file's convention.
    from deployer.src.helpers import make_request, send_slack_notif
    from deployer.src.fetchers import get_configs_from_website

    _configs, name_to_id, _crawler_ids = get_configs_from_website()
    target = args[0]
    make_request("/{}/activate".format(name_to_id[target]), "PUT")
    message = {"title": "Enable connectors", "text": "- " + target}
    send_slack_notif([message])
def run(self, args):
    """Trigger a reindex of the connector named in args[0], notify Slack."""
    # Imports deferred to call time, matching the file's convention.
    from deployer.src.helpers import make_request, send_slack_notif
    from deployer.src.fetchers import get_configs_from_website

    _configs, name_to_id, _crawler_ids = get_configs_from_website()
    target = args[0]
    make_request("/{}/reindex".format(name_to_id[target]), "POST")
    send_slack_notif([{"title": "Manually reindexed connectors",
                       "text": "- " + target}])
    return 0
def run(self, args):
    """Deactivate the connector named in args[0], clear its crawling
    issue, and post a Slack notice."""
    # Imports deferred to call time, matching the file's convention.
    from deployer.src.helpers import make_request, send_slack_notif
    from deployer.src.fetchers import get_configs_from_website
    from deployer.src.algolia_helper import remove_crawling_issue

    _configs, name_to_id, _crawler_ids = get_configs_from_website()
    target = args[0]
    make_request("/{}/deactivate".format(name_to_id[target]), "PUT")
    remove_crawling_issue(target)
    message = {"title": "Disable connectors", "text": "- " + target}
    send_slack_notif([message])
def run(self, args):
    """Print the crawl logs for the connector named in args[0].

    Looks up the connector's crawler id from the website configs, then
    queries the Algolia crawlers API using HTTP basic-auth credentials
    taken from the SCHEDULER_USERNAME / SCHEDULER_PASSWORD environment
    variables, printing each log entry.

    Returns:
        0 on success.

    Raises:
        RuntimeError: if either credential environment variable is unset.
        KeyError: if the connector name is unknown.
    """
    configs, inverted, crawlers_id = get_configs_from_website()
    connector_name = args[0]
    scheduler_username = environ.get('SCHEDULER_USERNAME')
    scheduler_password = environ.get('SCHEDULER_PASSWORD')
    # Fail fast with a clear message instead of a cryptic TypeError from
    # string concatenation when a credential is missing.
    if not scheduler_username or not scheduler_password:
        raise RuntimeError(
            'SCHEDULER_USERNAME and SCHEDULER_PASSWORD must be set')
    # NOTE(review): credentials embedded in the URL can leak via logs or
    # proxies; pass auth separately if the request helper supports it.
    url = "https://{}:{}@crawlers.algolia.com/1/crawlers/{}/logs".format(
        scheduler_username, scheduler_password, crawlers_id[connector_name])
    r = make_custom_get_request(url)
    logs = json.loads(r.content)['logs']
    # reversed(): presumably the API returns newest-first, so this prints
    # in chronological order — TODO confirm against the API.
    for log in reversed(logs):
        print(log['content'])
        print('')
    return 0
def run(self, args):
    """Deactivate the connector named in args[0], clear its crawling
    issue, and post a Slack notice."""
    # Imports deferred to call time, matching the file's convention.
    from deployer.src.helpers import make_request, send_slack_notif
    from deployer.src.fetchers import get_configs_from_website
    from deployer.src.algolia_helper import remove_crawling_issue

    _configs, name_to_id, _crawler_ids = get_configs_from_website()
    target = args[0]
    endpoint = '/' + str(name_to_id[target]) + '/deactivate'
    make_request(endpoint, 'PUT')
    remove_crawling_issue(target)
    send_slack_notif([{'title': 'Disable connectors', 'text': '- ' + target}])
def run(self, args):
    """Print the crawl logs for the connector named in args[0].

    Looks up the connector's crawler id from the website configs, then
    queries the Algolia crawlers API using HTTP basic-auth credentials
    taken from the SCHEDULER_USERNAME / SCHEDULER_PASSWORD environment
    variables, printing each log entry.

    Returns:
        0 on success.

    Raises:
        RuntimeError: if either credential environment variable is unset.
        KeyError: if the connector name is unknown.
    """
    configs, inverted, crawlers_id = get_configs_from_website()
    connector_name = args[0]
    scheduler_username = environ.get('SCHEDULER_USERNAME')
    scheduler_password = environ.get('SCHEDULER_PASSWORD')
    # Fail fast with a clear message instead of a cryptic TypeError from
    # string concatenation when a credential is missing.
    if not scheduler_username or not scheduler_password:
        raise RuntimeError(
            'SCHEDULER_USERNAME and SCHEDULER_PASSWORD must be set')
    # NOTE(review): credentials embedded in the URL can leak via logs or
    # proxies; pass auth separately if the request helper supports it.
    url = "https://{}:{}@crawlers.algolia.com/1/crawlers/{}/logs".format(
        scheduler_username, scheduler_password, crawlers_id[connector_name])
    r = make_custom_get_request(url)
    logs = json.loads(r.content)['logs']
    # reversed(): presumably the API returns newest-first, so this prints
    # in chronological order — TODO confirm against the API.
    for log in reversed(logs):
        print(log['content'])
        print('')
    return 0
def run(self, args):
    """Print the crawl logs for the connector named in args[0].

    Looks up the connector's crawler id from the website configs, then
    queries the Algolia crawlers API using HTTP basic-auth credentials
    taken from the SCHEDULER_USERNAME / SCHEDULER_PASSWORD environment
    variables, printing each log entry.

    Returns:
        0 on success.

    Raises:
        RuntimeError: if either credential environment variable is unset.
        KeyError: if the connector name is unknown.
    """
    # Imports deferred to call time, matching the file's convention;
    # stdlib first, then project modules.
    import json
    from os import environ

    from deployer.src.helpers import make_custom_get_request
    from deployer.src.fetchers import get_configs_from_website

    configs, inverted, crawlers_id = get_configs_from_website()
    connector_name = args[0]
    scheduler_username = environ.get("SCHEDULER_USERNAME")
    scheduler_password = environ.get("SCHEDULER_PASSWORD")
    # Fail fast with a clear message instead of a cryptic TypeError from
    # string concatenation when a credential is missing.
    if not scheduler_username or not scheduler_password:
        raise RuntimeError(
            "SCHEDULER_USERNAME and SCHEDULER_PASSWORD must be set")
    # NOTE(review): credentials embedded in the URL can leak via logs or
    # proxies; pass auth separately if the request helper supports it.
    url = "https://{}:{}@crawlers.algolia.com/1/crawlers/{}/logs".format(
        scheduler_username, scheduler_password, crawlers_id[connector_name])
    r = make_custom_get_request(url)
    logs = json.loads(r.content)["logs"]
    # reversed(): presumably the API returns newest-first, so this prints
    # in chronological order — TODO confirm against the API.
    for log in reversed(logs):
        print(log["content"])
        print("")
    return 0
def run(self, args):
    """Trigger a reindex of the connector named in args[0] (no Slack
    notification for this command)."""
    _configs, name_to_id, _crawler_ids = get_configs_from_website()
    target = args[0]
    make_request('/' + str(name_to_id[target]) + '/reindex', 'POST')
    return 0