def compute_KDJ933(stock, theDate, close, high, low, real):
    # If the cached base KDJ entry is for this date, return it directly.
    baseKDJDateStr = next(iter(stock['KDJ']))
    if dateutil.parse_date(baseKDJDateStr) == theDate:
        return (stock['KDJ'][baseKDJDateStr][0],
                stock['KDJ'][baseKDJDateStr][1],
                stock['KDJ'][baseKDJDateStr][2])

    # The previous trading day's K/D/J values seed the smoothing recursion.
    (k_1, d_1, j_1) = get_previous_KDJ933(stock, dateutil.previous_date(theDate))
    if not real:
        return (k_1, d_1, j_1)

    # Highest high and lowest low over the last 9 real trading days.
    h9 = high
    l9 = low
    count = 1
    while count < 9:
        theDate = dateutil.previous_date(theDate)
        datestr = dateutil.format_date(theDate)
        dh = previous_data_with_date(stock['code'], datestr)
        if dh['real']:
            count += 1
            if l9 > dh['low']:
                l9 = dh['low']
            if h9 < dh['high']:
                h9 = dh['high']

    rsv = (close - l9) / (h9 - l9) * 100
    k = rsv / 3 + 2 * k_1 / 3
    d = k / 3 + 2 * d_1 / 3
    j = 3 * k - 2 * d
    return (k, d, j)
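# For reference, the smoothing above is the standard KDJ(9,3,3) recursion:
# RSV = (close - L9) / (H9 - L9) * 100, K = RSV/3 + 2*K_prev/3,
# D = K/3 + 2*D_prev/3, J = 3K - 2D. A minimal, self-contained sketch; the
# `bars` list of OHLC dicts and the seed values of 50 are assumptions for
# illustration, not part of the project code above.
def kdj_933_sketch(bars, k_prev=50.0, d_prev=50.0):
    window = bars[-9:]
    h9 = max(b['high'] for b in window)
    l9 = min(b['low'] for b in window)
    close = window[-1]['close']
    rsv = 0.0 if h9 == l9 else (close - l9) / (h9 - l9) * 100
    k = rsv / 3 + 2 * k_prev / 3
    d = k / 3 + 2 * d_prev / 3
    j = 3 * k - 2 * d
    return k, d, j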
def get_commits(path, token):
    headers = {
        'Accept': GITHUB_REQUEST_MEDIATYPE,
        'Authorization': 'token ' + token,
    }

    resp = requests.get(GITHUB_COMMITS_URL,
                        params={'path': path},
                        headers=headers,
                        timeout=REQUEST_TIMEOUT)
    resp.raise_for_status()

    # Commits are in descending order, so we reverse them.
    commits = resp.json()
    commits.reverse()

    # Add the file path to the annotation.
    for i, c in enumerate(commits):
        commits[i] = {
            'sha': c['sha'],
            'commit': c['commit'],
            'file_path': path,
            'timestamp': parse_date(c['commit']['committer']['date']),  # noqa
        }

    return commits
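# Hedged usage sketch for get_commits(); the constant values below are
# plausible placeholders, not the project's actual configuration.
GITHUB_REQUEST_MEDIATYPE = 'application/vnd.github.v3+json'
GITHUB_COMMITS_URL = 'https://api.github.com/repos/OWNER/REPO/commits'  # hypothetical repo
REQUEST_TIMEOUT = 10  # seconds

# history = get_commits('docs/Conventions.md', token=GITHUB_TOKEN)
# history[0] is the oldest commit touching the path, history[-1] the newest.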
def get_positions(self):
    from dateutil import parse_date
    return [
        (position_id,
         parse_date(date_wgt.Value),
         self.tarif_codes[tarif_code_wgt.Children[0].StringSelection][0],
         tarif_code_wgt.Children[1].Value,
         float(quantity_wgt.Value),
         round(float(price_wgt.Value) * 100))
        for position_id, date_wgt, _, tarif_code_wgt, quantity_wgt, price_wgt, _, _, _, _
        in self._positions
    ]
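# The tuples returned above are positional; a hypothetical namedtuple makes the
# layout explicit. The field names are inferred from the widgets read in the
# comprehension, not taken from the original project.
from collections import namedtuple

Position = namedtuple(
    'Position',
    ['position_id', 'date', 'tarif_code', 'description', 'quantity', 'price_cents'])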
def fetch_returns(dt_index, rshift, lshift=-1):
    res = {}
    for dt, date in zip(dt_index, dateutil.to_datestr(dt_index)):
        di, date = dateutil.parse_date(DATES, date, -1)
        if di - lshift < 0 or di + rshift + 1 > len(DATES):
            continue
        r = quote_fetcher.fetch_window('returns', DATES[di - lshift:di + rshift + 1])
        res[dt] = (1 + r).cumprod().iloc[-1] - 1.
    res = pd.DataFrame(res).T
    return res
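# The compounding step used above, in isolation: daily simple returns over the
# window are chained into a single cumulative return (sample values only).
import pandas as pd

r = pd.Series([0.01, -0.02, 0.015])
cum = (1 + r).cumprod().iloc[-1] - 1.0   # ~0.0046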
def fetch_dates(df, dt_index, rshift=0, lshift=0):
    df_dates = dateutil.to_datestr(dt_index)
    res = {}
    for dt, date in zip(dt_index, dateutil.to_datestr(dt_index)):
        try:
            di, date = dateutil.parse_date(df_dates, date, -1)
            assert di - lshift >= 0 and di + rshift + 1 <= len(df_dates)
            if rshift + lshift == 0:
                res[dt] = df.iloc[di - lshift]
            else:
                res[dt] = df.iloc[di - lshift:di + rshift + 1]
        except ValueError:
            pass
        except Exception as e:
            raise
    return res
def get_commit(path, token, ref='master'):
    headers = {
        'Accept': GITHUB_REQUEST_MEDIATYPE,
        'Authorization': 'token ' + token,
    }

    resp = requests.get(GITHUB_COMMITS_URL,
                        params={
                            'path': path,
                            'sha': ref,
                        },
                        headers=headers,
                        timeout=REQUEST_TIMEOUT)
    resp.raise_for_status()

    # The first entry is the most recent commit that touched `path` on `ref`.
    c = resp.json()[0]

    return {
        'sha': c['sha'],
        'commit': c['commit'],
        'file_path': path,
        'timestamp': parse_date(c['commit']['committer']['date']),
    }
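# Unlike get_commits() above, get_commit() returns only the most recent commit
# that touched `path` on the given ref. A hedged example call; the path and
# token are placeholders.
# head = get_commit('docs/Conventions.md', token=GITHUB_TOKEN, ref='master')
# print(head['sha'], head['timestamp'])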
def generate(file_name, domain, model, commit):
    # The unique batch ID is the commit SHA.
    batch = commit['sha']

    # Timestamp of when the entity became available.
    timestamp = parse_date(commit['commit']['committer']['date'])

    entities = []

    # Committer and author may be the same person, but entities
    # are deduped downstream.
    committer = commit['commit']['committer']
    committer = entity({
        'domain': domain,
        'name': committer['email'],
        'labels': ['Person', 'Agent'],
        'attrs': committer,
    }, timestamp, batch=batch)

    author = commit['commit']['author']
    author = entity({
        'domain': domain,
        'name': author['email'],
        'labels': ['Person', 'Agent'],
        'attrs': author,
    }, timestamp, batch=batch)

    # The git commit that corresponds to the current state. The metadata
    # of the commit does not technically need to be copied here. A consumer
    # of this data could look up the commit and fetch the data manually.
    commit = entity({
        'domain': domain,
        'name': commit['sha'],
        'labels': ['Commit'],
        'attrs': {
            'sha': commit['sha'],
            'url': commit['commit']['url'],
            'message': commit['commit']['message'],
            'commit_time': commit['commit']['committer']['date'],
            'author_time': commit['commit']['author']['date'],
        },
        'refs': {
            'committer': committer.ident,
            'author': author.ident,
        }
    }, timestamp, batch=batch)

    # The source file containing the data the model, tables, and fields
    # were extracted from.
    source_file = entity({
        'domain': domain,
        'name': file_name,
        'labels': ['File'],
        'attrs': {
            'path': file_name,
        },
        'refs': {
            'commit': commit.ident,
        },
    }, timestamp, batch=batch)

    service = entity({
        'domain': domain,
        'name': 'pedsnet/etlconv',
        'labels': ['Agent', 'Service'],
        'attrs': {
            'name': 'PEDSnet ETL Conventions Service',
            'version': __version__,
        },
    }, timestamp, batch=batch)

    # Append the entities.
    entities.append(service)
    entities.append(source_file)
    entities.append(commit)
    entities.append(author)
    entities.append(committer)

    # The base event is copied for each entity. This event describes
    # the relationship between the Git commit and the entity.
    base_event = {
        'domain': domain,
        'name': 'event_%s' % commit['name'],
        'labels': ['Event', 'EntitiesExtracted'],
        'attrs': {
            'event': 'EntitiesExtracted',
        },
        'refs': {
            'file': source_file.ident,
            'service': service.ident,
        },
    }

    model = dict(model)
    tables = model.pop('tables')

    model = entity({
        'domain': domain,
        'name': model['name'],
        'labels': ['Model'],
        'attrs': model,
    }, timestamp, batch=batch)

    entities.append(model)

    event = entity(deepcopy(base_event), timestamp, batch=batch)
    event['refs']['entity'] = model.ident
    entities.append(event)

    for table_name, attrs in tables.items():
        attrs = dict(attrs)
        fields = attrs.pop('fields')
        attrs['name'] = table_name

        table_id = table_name

        table = entity({
            'domain': domain,
            'name': table_id,
            'labels': ['Table'],
            'attrs': attrs,
            'refs': {
                'model': model.ident,
            },
        }, timestamp, batch=batch)

        entities.append(table)

        event = entity(deepcopy(base_event), timestamp, batch=batch)
        event['refs']['entity'] = table.ident
        entities.append(event)

        for field_name, attrs in fields.items():
            attrs = dict(attrs)
            attrs['name'] = field_name

            field_id = '{}.{}'.format(table_name, field_name)

            field = entity({
                'domain': domain,
                'name': field_id,
                'labels': ['Field'],
                'attrs': attrs,
                'refs': {
                    'table': table.ident,
                    'model': model.ident,
                },
            }, timestamp, batch=batch)

            entities.append(field)

            event = entity(deepcopy(base_event), timestamp, batch=batch)
            event['refs']['entity'] = field.ident
            entities.append(event)

    return entities
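# For orientation, the approximate shape of one record generate() assembles,
# assuming entity() returns a dict-like object carrying an `ident` attribute
# (inferred from how it is used above). Values are illustrative only.
example_field = {
    'domain': 'example-domain',              # hypothetical
    'name': 'person.person_id',              # '<table>.<field>'
    'labels': ['Field'],
    'attrs': {'name': 'person_id'},
    'refs': {'table': '<table ident>', 'model': '<model ident>'},
}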