def after_login(self, response):
    """Kick off iteration queries for the search, control, and diff projects.

    Yields one scrapy.Request per project; each response is handled by
    self.parse_iteration.
    """
    # The three requests differed only in the project filter, so build
    # them in one loop instead of triplicating the URL construction.
    fields = [FIELDS.ENDDATE, FIELDS.STARTDATE,
              FIELDS.ESTIMATE, FIELDS.ACTUAL, FIELDS.REMAINING, FIELDS.PROJECT]
    for project in (PROJECTS.SEARCH, PROJECTS.CONTROL, PROJECTS.DIFF):
        iterations_url = helper.build_url(
            BASE_SERVICE + CATEGORY.ITERATION, [], fields, [project])
        yield scrapy.Request(iterations_url, callback=self.parse_iteration)
def after_login(self, response):
    """Kick off iteration queries for the search, control, and diff projects.

    Yields one scrapy.Request per project; each response is handled by
    self.parse_iteration.
    """
    # The three requests differed only in the project filter, so build
    # them in one loop instead of triplicating the URL construction.
    fields = [FIELDS.ENDDATE, FIELDS.STARTDATE,
              FIELDS.ESTIMATE, FIELDS.ACTUAL, FIELDS.REMAINING, FIELDS.PROJECT]
    for project in (PROJECTS.SEARCH, PROJECTS.CONTROL, PROJECTS.DIFF):
        iterations_url = helper.build_url(
            BASE_SERVICE + CATEGORY.ITERATION, [], fields, [project])
        yield scrapy.Request(iterations_url, callback=self.parse_iteration)
def step_impl(context):
    """Fetch a single player by id, recording the HTTP exchange on cassette."""
    player_url = helper.build_url(
        context.base_address, "player", context.player["playerid"]
    )
    with context.vcr.use_cassette("get_requests.json"):
        context.response = context.session.get(url=player_url)
def get_seo(request, url=None):
    """Resolve the Seo record for a request path, trying the longest URL
    prefix first and shortening one part at a time.

    Checks the cache before the database at every step; DB hits are cached
    for an hour. Returns the Seo instance, or False when nothing matches.
    """
    url_parts = request.wepo.url_parts
    if url:
        url, url_parts = get_url_parts(url)

    def lookup(cache_key, db_url):
        # Cache first, then database; cache successful DB hits for 1h.
        cached = cache.get(cache_key)
        if cached:
            return cached
        rows = Seo.objects.filter(url=db_url)
        if rows:
            cache.set(cache_key, rows[0], 3600)
            return rows[0]
        return None

    if not url_parts:
        # Empty path maps to the site root entry.
        found = lookup('seo::/', '/')
        if found:
            return found
    else:
        for trim in range(len(url_parts)):
            # trim == 0 means the full path; otherwise drop `trim` trailing
            # parts (build_url with a -0 slice would not mean "full path",
            # hence the explicit special case).
            if trim == 0:
                candidate = build_url(url_parts)
            else:
                candidate = build_url(url_parts, -trim)
            found = lookup('seo::%s' % candidate, candidate)
            if found:
                return found
    return False
def step_impl(context):
    """Delete a player by id via the recorded cassette and log the response."""
    player_url = helper.build_url(
        context.base_address, "player", context.player["playerid"]
    )
    with context.vcr.use_cassette("delete_requests.json"):
        context.response = context.session.delete(url=player_url)
    context.logger.info(context.response)
def step_impl(context):
    """Record a match result (winner/loser) via PUT and log the body."""
    match_url = helper.build_url(context.base_address, "match")
    payload = {
        "winner": context.player,
        "loser": context.second_player,
    }
    with context.vcr.use_cassette("put_requests.json"):
        context.response = context.session.put(url=match_url, json=payload)
    context.logger.info(context.response.content)
def step_impl(context):
    """Create a second player via POST and keep the parsed JSON response."""
    url = helper.build_url(context.base_address, "player")
    data = helper.new_player()
    with context.vcr.use_cassette("post_another_requests.json"):
        context.response = context.session.post(url=url, json=data)
    # The old `try: ... except Exception: raise` only re-raised and added
    # nothing; let a malformed body propagate its exception naturally.
    context.second_player = context.response.json()
def parse_iteration(self, response):
    """Parse an iteration query result and request its user stories.

    For every iteration in the Rally QueryResult, yields a scrapy.Request
    for the user-story artifacts of that iteration, handled by
    self.parse_userstory.
    """
    iteration_dict = json.loads(response.body)
    project_id = helper.get_project_id_from_url(response.url)
    # Loop-invariant: the project filter is the same for every iteration.
    project_query = "project = \"Project/" + project_id + "\""
    for iteration in iteration_dict["QueryResult"]["Results"]:
        iteration_query = "iteration = \"" + iteration["_ref"] + "\""
        iteration_userstories_url = helper.build_url(
            BASE_SERVICE + CATEGORY.ARTIFACT,
            [TYPES.USERSTORY],
            [FIELDS.OWNER, FIELDS.REVISION, FIELDS.ESTIMATE, FIELDS.ACTUAL,
             FIELDS.REMAINING, FIELDS.PROJECT, FIELDS.ITERATION],
            [project_query, iteration_query],
        )
        yield scrapy.Request(iteration_userstories_url, callback=self.parse_userstory)
def step_impl(context):
    """GET the player endpoint, sending the stored player fields as JSON.

    NOTE(review): a JSON body on a GET is unusual — presumably the service
    filters on it; confirm against the API.
    """
    player_url = helper.build_url(context.base_address, "player")
    payload = {
        key: context.player[key]
        for key in ("firstname", "lastname", "wins", "losses")
    }
    with context.vcr.use_cassette("get_requests.json"):
        context.response = context.session.get(url=player_url, json=payload)
def step_impl(context):
    """DELETE a player by id, sending the step table rows as a JSON mapping."""
    url = helper.build_url(context.base_address, "player", context.player["playerid"])
    data = {}
    for row in context.table:
        # BUG FIX: dict has no .append(); the original raised AttributeError.
        # Build the key -> field mapping by assignment instead.
        data[row["key"]] = row["field"]
    with context.vcr.use_cassette("put_requests.json"):
        context.response = context.session.delete(url=url, json=data)
    context.logger.info(context.response)
def step_impl(context):
    """Replace the current player's details via PUT and remember them."""
    player_url = helper.build_url(
        context.base_address, "player", context.player["playerid"]
    )
    player_details = helper.new_player()
    context.player = player_details
    with context.vcr.use_cassette("put_requests.json"):
        context.response = context.session.put(
            url=player_url, json={"changes": player_details}
        )
    context.logger.info(context.response)
def step_impl(context):
    """Re-fetch the player with a fresh cassette and assert the win count.

    Removes the stale recording first so the GET hits the live service,
    then checks the player's wins incremented by one.
    """
    try:
        os.remove("cassettes/get_requests.json")
    except OSError:
        # Cassette may not exist yet; ignore only filesystem errors —
        # the old bare `except:` silently swallowed every exception.
        pass
    url = helper.build_url(context.base_address, "player", context.player["playerid"])
    with context.vcr.use_cassette("get_requests.json"):
        context.response = context.session.get(url=url)
    result = context.response.json()[0]
    print(result)
    print(context.second_player)
    assert result["wins"] == context.player["wins"] + 1
import helper
import re

# Rally artifact types and the fields requested from the web service.
TYPES = helper.enum(FEATURE="portfolioitem/feature", USERSTORY="hierarchicalrequirement")
FIELDS = helper.enum(NAME="Name", OWNER="Owner", CHILDREN="Children")
BASE_SERVICE = "https://rally1.rallydev.com/slm/webservice/v2.x/iteration"

query = ["Project = \"Project/15468059055\""]
base_url = helper.build_url(BASE_SERVICE, [], [], query)
# BUG FIX: `print base_url` is a Python 2-only statement and a SyntaxError
# on Python 3; the call form works on both interpreters.
print(base_url)
def step_impl(context):
    """Request the players leaderboard through the recorded cassette."""
    leaderboard_url = helper.build_url(
        context.base_address, "players", "leaderboard"
    )
    with context.vcr.use_cassette("get_requests.json"):
        context.response = context.session.get(url=leaderboard_url)
def after_login(self, response):
    """Queue an iteration date-range query for every SE project.

    Yields one scrapy.Request per entry in projects_se; each response is
    handled by self.parse_iteration.
    """
    date_fields = [FIELDS.ENDDATE, FIELDS.STARTDATE]
    for project_filter in projects_se:
        request_url = helper.build_url(
            BASE_SERVICE + CATEGORY.ITERATION, [], date_fields, [project_filter]
        )
        yield scrapy.Request(request_url, callback=self.parse_iteration)
def after_login(self, response):
    """After login, request the feature list for the search project.

    Returns a scrapy.Request handled by self.parse_feature.
    """
    # Hard-coded Rally project id for search — presumably stable; confirm.
    project_filter = "Project = \"Project/15468059055\""
    feature_url = helper.build_url(
        BASE_SERVICE,
        [TYPES.FEATURE],
        [FIELDS.NAME, FIELDS.OWNER, FIELDS.CHILDREN],
        [project_filter],
    )
    return scrapy.Request(feature_url, callback=self.parse_feature)