def get_file_data(file_id):
    """Build a FileHits record for a single file node."""
    cl, al, fl = ([] for i in range(3))
    retval = "WHERE F.id='{0}' RETURN PS,VSS,D,F".format(file_id)
    cquery = "{0} {1}".format(full_traversal, retval)
    res = process_cquery_http(cquery)
    size = 0
    if 'size' in res[0]['F']:  # some files with non-valid URLs can have no size
        size = res[0]['F']['size']
    furl = extract_url(res[0]['F'])
    access_level = "open"
    if furl.startswith('Private data'):
        access_level = "controlled"
    sample_bs = res[0]['VSS']['body_site']
    wf = "{0} -> {1}".format(sample_bs, res[0]['D']['node_type'])
    cl.append(CaseHits(project=Project(projectId=res[0]['PS']['project_subtype']),
                       caseId=res[0]['VSS']['id']))
    al.append(AssociatedEntities(entityId=res[0]['D']['id'],
                                 caseId=res[0]['VSS']['id'],
                                 entityType=res[0]['D']['node_type']))
    fl.append(IndivFiles(fileId=res[0]['F']['id']))
    # can add analysis ID once node is present, or remove if deemed unnecessary
    a = Analysis(updatedDatetime="null", workflowType=wf,
                 analysisId="null", inputFiles=fl)
    return FileHits(dataType=res[0]['F']['node_type'],
                    fileName=furl,
                    md5sum=res[0]['F']['md5'],
                    dataFormat=res[0]['F']['format'],
                    submitterId="null",
                    state="submitted",
                    access=access_level,
                    fileId=res[0]['F']['id'],
                    dataCategory=res[0]['F']['node_type'],
                    experimentalStrategy=res[0]['F']['study'],
                    fileSize=size,
                    cases=cl,
                    associatedEntities=al,
                    analysis=a)

def process_client_telemetry(hostname):
    print("telemetry: " + hostname)
    if not request.json:
        return ("Waiting for json", 400)
    mydata = request.json
    t = Telemetry()
    # t.ts = mydata.get("ts")
    t.domain = mydata.get("domain")
    t.ip = mydata.get("ip")
    t.hostname = hostname
    t.filepath = mydata.get("filepath")
    t.hash = mydata.get("hash")
    db_session.add(t)
    # Queue a new "vt" analysis only for hashes not seen before.
    exist_hash = Analysis.query.filter(Analysis.hash == mydata.get("hash"))
    if exist_hash.first() is None:
        a = Analysis()
        a.hash = mydata.get("hash")
        a.filepath = mydata.get("filepath")
        a.system = "vt"
        a.status = "new"
        db_session.add(a)
    db_session.commit()
    return ("Got telemetry", 200)

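# A minimal client-side sketch for exercising process_client_telemetry.
# The route "/telemetry/<hostname>" and the server URL are assumptions --
# the route decorator is not shown above -- and the payload values are
# illustrative only.
import requests

def send_telemetry_example():
    payload = {
        "domain": "example.com",
        "ip": "10.0.0.5",
        "filepath": "C:\\temp\\sample.exe",
        "hash": "d41d8cd98f00b204e9800998ecf8427e",
    }
    # json= sets the Content-Type header so request.json is populated server-side.
    r = requests.post("http://localhost:5000/telemetry/myhost", json=payload)
    print(r.status_code, r.text)
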
def process_client_file(hostname, hash):
    f = request.files['file']
    # Save and reopen under the sanitized name so the two paths match.
    saved_name = secure_filename(f.filename)
    f.save(os.path.join(UPLOADED_PATH, saved_name))
    command_entry = Commands.query.filter(Commands.hash == hash)
    if command_entry.first() is not None:
        # command_entry.status = "done"
        db_session.delete(command_entry.first())
    # Submit the uploaded sample to the Cuckoo sandbox.
    with open(os.path.join(UPLOADED_PATH, saved_name), 'rb') as fin:
        r = requests.post(
            "http://sandbox.etp-research.info:8090/tasks/create/submit",
            files={'file': fin})
    print(r)
    task_ids = r.json()["task_ids"]
    print(task_ids[0])
    # command_entry.result = "http://sandbox.etp-research.info:8000/analysis/" + task_ids[0] + "/summary"
    a = Analysis()
    a.hash = hash
    a.filepath = f.filename
    a.system = "cuckoo"
    a.status = "done"
    a.link = ("http://sandbox.etp-research.info:8000/analysis/"
              + str(task_ids[0]) + "/summary")
    db_session.add(a)
    db_session.commit()
    return 'file uploaded successfully'

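# A hedged companion sketch: how a client could upload the sample that
# process_client_file expects. The route "/file/<hostname>/<hash>" is an
# assumption; the handler above only requires the multipart field 'file'.
import requests

def upload_sample_example(path, file_hash):
    with open(path, 'rb') as fh:
        r = requests.post("http://localhost:5000/file/myhost/" + file_hash,
                          files={'file': fh})
    print(r.status_code, r.text)
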
async def main(broker_account_id=None):
    balance = 10000
    amount = 20
    journal = Journal(mode='debug')
    traider = Traider(balance, journal, amount)
    analysis = Analysis(journal)
    stock = Stock(traider, journal)
    client = ti.AsyncClient(TOKEN, use_sandbox=True)
    data = await get_data(client, figi)
    print('Amount of data:', len(data))
    df = pd.DataFrame(data)
    df.set_index('date', inplace=True)
    traider.trade(df, strategy=StrategyMACD_Day(loss_level=loss,
                                                profit_level=profit,
                                                macd_level=macd_level,
                                                target_stability=target_stability))
    stock.interval_trade(df[-1:])
    analysis.score()
    no_update = True
    # Re-run the strategy each time a new 5-minute candle closes.
    async with ti.Streaming(TOKEN, receive_timeout=20,
                            reconnect_timeout=10, heartbeat=20) as streaming:
        await streaming.candle.subscribe(figi, ti.CandleResolution.min5)
        async for event in streaming:
            if event.event == 'candle':
                # print(event.payload.figi, event.payload.c, event.payload.interval,
                #       event.payload.h, event.payload.time)
                candle_ts = event.payload.time.strftime('%e-%m-%Y %H:%M')
                if candle_ts != data[-1]['date'] and no_update:
                    no_update = False
                    data = await get_data(client, figi)
                    df = pd.DataFrame(data)
                    df.set_index('date', inplace=True)
                    traider.trade(df, strategy=StrategyMACD_Day(
                        loss_level=loss, profit_level=profit,
                        macd_level=macd_level,
                        target_stability=target_stability))
                    stock.interval_trade(df[-1:])
                    analysis.score()
                elif candle_ts == data[-1]['date']:
                    no_update = True

def import_analysis(self, name='', description='', details='', filepath='',
                    params='', inputs='', outputs=''):
    '''
    Imports an analysis module (present on the local path) written in the
    Spark RDD language into the backend (HDFS, Swift object store, or NFS).
    The parameters are used to update the metadata.
    '''
    filename = os.path.basename(filepath)
    created = datetime.now()
    user = getpass.getuser()
    checkMod = self.session.query(Analysis).\
        from_statement(text("SELECT * FROM analysis WHERE name=:name")).\
        params(name=name).first()
    if checkMod is None:
        analysisMod = Analysis(name=name, filepath=filename,
                               description=description, details=details,
                               created=created, user=user, parameters=params,
                               inputs=inputs, outputs=outputs)
        # Back up the metadata before modifying it.
        shutil.copyfile(self.config['METADATA_LOCAL_PATH'],
                        self.backup_metadata_path)
        self.session.add(analysisMod)
        self.session.commit()
        # Upload the metadata and the module to the backend.
        objs = []
        if self.backend == 'hdfs':
            objs.append((self.hdfsmodpath, self.config['METADATA_LOCAL_PATH']))
            objs.append((self.hdfsmodpath, filepath))
        elif self.backend == 'swift':
            objs.append(('sqlite.db', self.config['METADATA_LOCAL_PATH']))
            objs.append((filename, filepath))
        elif self.backend == 'nfs':
            objs.append((filename, filepath))  # send only the module
        saveObjsBackend(objs, self.backend, self.config)
    else:
        raise RuntimeError("Analysis " + name + " already exists")

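# Usage sketch for import_analysis, assuming `catalog` is an instance of the
# class that defines it; the module path and metadata values below are
# purely illustrative.
catalog.import_analysis(
    name='wordcount',
    description='Counts words in a text RDD',
    details='Spark RDD module',
    filepath='/tmp/wordcount.py',
    params='input_path',
    inputs='text',
    outputs='counts')
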
def analyze(request, project_id):
    project = get_object_or_404(Project, pk=project_id)
    result_analysis = code_analysis(project)
    analysis = Analysis()
    analysis.project = project
    analysis.pep8 = result_analysis['pep8']['percentage_errors']
    analysis.pyflakes = result_analysis['pyflakes']['percentage_errors']
    analysis.clonedigger = result_analysis['clonedigger']['percentage_errors']
    analysis.jshint = result_analysis['jshint']['percentage_errors']
    analysis.csslint = result_analysis['csslint']['percentage_errors']
    analysis.result = result_analysis
    analysis.save()
    return HttpResponse('done')

profit = params['profit_level']
macd_level = params['macd_level']
target_stability = params['target_stability']
# client = ti.SyncClient(TOKEN, use_sandbox=True)
# figi = sync_lists(client)
for f in figi:
    print(f)
    data = data_all[data_all['figi'] == f]
    df = data[-700:]
    journal = Journal(mode='')
    traider = Traider(balance, journal, amount, figi=f)
    stock = Stock(traider, journal)
    analysis = Analysis(traider, journal)
    # Replay the last 700 candles bar by bar and let the strategy trade.
    for i in range(data_limit_MACD, len(df)):
        data_ = df[:i + 1]
        # print(data_[-1:])
        # print(talib.MACD(data_['Close']))
        traider.trade(data_, strategy=StrategyMACD_Day(
            loss_level=loss, profit_level=profit,
            macd_level=macd_level, target_stability=target_stability))
        # traider.trade(data_, strategy=StartegyBase())
        stock.interval_trade(data_[i:i + 1])
        # print(journal.get_orders())

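# For reference, a minimal sketch of the kind of MACD crossover signal that
# StrategyMACD_Day presumably evaluates (its internals are not shown); it
# uses the same talib.MACD call that is commented out above. Expects a
# float64 numpy array, e.g. data_['Close'].values.
import talib

def macd_crossover_signal(close_prices):
    macd, signal, hist = talib.MACD(close_prices, fastperiod=12,
                                    slowperiod=26, signalperiod=9)
    # Buy when the MACD line crosses above its signal line on the last bar.
    if macd[-2] < signal[-2] and macd[-1] > signal[-1]:
        return 'buy'
    # Sell on the opposite crossing.
    if macd[-2] > signal[-2] and macd[-1] < signal[-1]:
        return 'sell'
    return 'hold'
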
def get_game_analysis(self, request):
    """Return history of in-game choices."""
    games = Game.query().order(Game.user)
    if not games:
        raise endpoints.NotFoundException('Games not found.')
    return Analysis(analysis=[game.to_game_analysis() for game in games])

def create(self, request):
    analysis = Analysis(**request.data)
    analysis.save()
    return HttpResponse(analysis.to_json())

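# A hedged example of the POST this create() expects: request.data is
# unpacked straight into the Analysis constructor, so the JSON keys must
# match the model's field names. The URL and field names below are
# assumptions for illustration only.
import requests

payload = {"name": "nightly-run", "status": "new"}
r = requests.post("http://localhost:8000/analysis/", json=payload)
print(r.status_code, r.text)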