def remove_service(self, service_id, service_name):
    # delete service file
    with self.app.test_client() as client:
        data = dict(id=service_id)
        response = client.post('/services/delete', data=data, follow_redirects=False)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.get_json()['status'], 'success')
    # check service file and folder are removed under the final_path
    self.assertFalse(os.path.isfile(os.path.join(service_dir, service_name, service_name + '.py')))
    # check models.txt is updated
    self.assertEqual([], ServiceModelsManager.get_models())
    # check database
    try:
        db = create_session(self.app.config['DATABASE_URI'], drop_tables=False)
        service_row = db.query(Service.id, Service.name).first()
        result = (service_row is None)
    finally:
        db.remove()
    self.assertTrue(result)
    from database import db_engine
    self.assertFalse(db_engine.has_table('report_telnet'))
    # clean the service module, otherwise the table won't be added to Base.metadata in the next test
    del sys.modules['pipot.services.' + service_name + '.' + service_name]
def add_service(self, service_name, service_file_name):
    # upload the service file
    service_file = codecs.open(os.path.join(test_dir, 'testFiles', service_file_name), 'rb')
    # service_file = FileStorage(service_file)
    with self.app.test_client() as client:
        data = dict(
            file=service_file,
            description='test'
        )
        response = client.post('/services', data=data, follow_redirects=False)
        self.assertEqual(response.status_code, 200)
    # check service file and folder are created under the final_path
    self.assertTrue(os.path.isdir(os.path.join(service_dir, service_name)))
    self.assertTrue(os.path.isfile(os.path.join(service_dir, service_name, service_name + '.py')))
    # check models.txt is updated
    self.assertEqual(['TelnetService.ReportTelnet'], ServiceModelsManager.get_models())
    # check database
    try:
        db = create_session(self.app.config['DATABASE_URI'], drop_tables=False)
        service_row = db.query(Service.id, Service.name).first()
        service_id = service_row.id
        name = service_row.name
    finally:
        db.remove()
    from database import db_engine
    self.assertTrue(db_engine.has_table('report_telnet'))
    self.assertEqual(service_name, name)
    return service_id
def test_update_with_valid_notification_file(self):
    notification_name = 'TelegramNotification'
    notification_file_name = notification_name + '.py'
    # the modified file adds a new description column
    modified_notification_file_name = 'ModifiedTelegramNotification/TelegramNotification.py'
    notification_id = self.add_notification(notification_name, notification_file_name)
    response = self.update_notification(notification_id, notification_name, modified_notification_file_name)
    self.assertEqual(response.get_json()['status'], 'success')
    # check file content
    self.assertTrue(
        filecmp.cmp(
            os.path.join(notification_dir, notification_file_name),
            os.path.join(test_dir, 'testFiles', modified_notification_file_name)))
    # check the database
    try:
        db = create_session(self.app.config['DATABASE_URI'], drop_tables=False)
        notification_row = db.query(Notification.id, Notification.name).first()
        notification_id = notification_row.id
        name = notification_row.name
    finally:
        db.remove()
    self.assertEqual(notification_name, name)
    self.remove_notification(notification_id, notification_name)
def main():
    args = parse_arguments()
    origin = args.origin
    start_date = datetime.strptime(args.start_date, "%Y/%m/%d")
    end_date = datetime.strptime(args.end_date, "%Y/%m/%d")
    n_clusters = args.n_clusters
    output = args.output

    # get the cable data
    session = create_session(database_name, username, password)
    cables = session.query(Cable).filter(Cable.origin == origin,
                                         Cable.date >= start_date,
                                         Cable.date < end_date).all()
    nd = len(cables)

    # create the document vectors
    document_vectors = create_document_vectors(cables)
    # calculate term document frequencies
    df = calculate_term_frequencies(document_vectors)
    # calculate inverse document frequencies (IDF)
    idf = calculate_idf(df, nd)
    # create document-term matrix
    document_term_matrix = create_document_term_matrix(document_vectors, idf)
    # calculate clusters
    document_term_matrix = normalize(document_term_matrix)
    clusters = cluster_documents(n_clusters, document_term_matrix)
    # create report
    create_report(output, cables, clusters)
def App_alarm(environ, start_response):
    try:
        _, door = environ['PATH_INFO'].split('/')
        request_length = int(environ.get('CONTENT_LENGTH', 0))
        body = environ['wsgi.input'].read(request_length).decode('UTF-8')
        request_body = json.loads(body)
    except ValueError as e:
        return bad_req(start_response)
    try:
        msg = request_body['message']
    except KeyError:
        return bad_req(start_response)
    s = db.create_session(config.db)
    r = db.Alarm(type=msg, door_name=door)
    s.add(r)
    try:
        s.commit()
    except:
        return bad_req(start_response)
    m = 'ALARM: ' + str(r)
    print(m)
    for recv in config.sms_receivers:
        sms(config.sms_device, recv, m)
    start_response("200 OK", [])
    return []
def add_notification(self, notification_name, notification_file_name, install_mock):
    # upload the notification file
    notification_file = codecs.open(
        os.path.join(test_dir, 'testFiles', notification_name, notification_file_name), 'rb')
    # notification_file = FileStorage(notification_file)
    with self.app.test_client() as client:
        data = dict(file=notification_file, description='test')
        response = client.post('/notifications', data=data, follow_redirects=False)
        self.assertEqual(response.status_code, 200)
        install_mock.assert_called_once()
    # check the backup notification file is removed under the temp_path
    self.assertFalse(
        os.path.isfile(
            os.path.join(notification_dir, 'temp', notification_file_name)))
    # check notification file and folder are created under the final_path
    self.assertTrue(
        os.path.isfile(
            os.path.join(notification_dir, notification_name + '.py')))
    # check database
    try:
        db = create_session(self.app.config['DATABASE_URI'], drop_tables=False)
        notification_row = db.query(Notification.id, Notification.name).first()
        notification_id = notification_row.id
        name = notification_row.name
    finally:
        db.remove()
    self.assertEqual(notification_name, name)
    return notification_id
def _install_notification_service_thread(cls, update, description):
    """
    Installs the necessary dependencies for the notification service.

    :param cls: The class of the notification service.
    :type cls: class
    :param update: Whether this is an update of an already installed service.
    :type update: bool
    :param description: Description stored with the new notification entry.
    :type description: str
    :return: void
    :rtype: void
    """
    from run import app
    apt_deps = getattr(cls, 'get_apt_dependencies')()
    if len(apt_deps) > 0:
        apt = ['apt-get', '-q', '-y', 'install']
        apt.extend(apt_deps)
        print('Calling %s' % " ".join(apt))
        # Call apt-get install
        _ph = subprocess.Popen(apt)
        _ph.wait()
    pip_deps = getattr(cls, 'get_pip_dependencies')()
    if len(pip_deps) > 0:
        pip = ['pip', 'install']
        pip.extend(pip_deps)
        print('Calling %s' % " ".join(pip))
        _ph = subprocess.Popen(pip)
        _ph.wait()
    getattr(cls, 'after_install_hook')()
    if not update:
        instance = cls(getattr(cls, 'get_extra_config_sample')())
        notification = Notification(instance.__class__.__name__, description)
        db = create_session(app.config['DATABASE_URI'])
        db.add(notification)
        db.commit()
        db.close()
def add_users(self):
    session = create_session()
    from api.services.user_service import get_user_by_query, create_user
    from api.services.user_roles_service import get_role_by_username, insert_into_user_role
    from api.services.roles_service import get_role_by_name
    users = self.data.get("users")
    for user in users:
        username = user['username']
        email = user['email']
        kwargs = {'username': username, 'email': email}
        user_q = get_user_by_query(session, **kwargs)
        if user_q is None:
            user_data = {
                'email': email,
                "username": username,
                "password": user["password"],
                "status": user["status"]
            }
            user_q = create_user(session, user_data)
        user_roles_q = get_role_by_username(session, username)
        if user_roles_q is None:
            role_name = user['role']
            role_q = get_role_by_name(session, role_name)
            user_id = user_q.id
            role_id = role_q.id
            data = {'user_id': user_id, 'role_id': role_id}
            insert_into_user_role(session, data)
    session.close()
def initialize_storage(self, db_args):
    """
    Initializes DB tables and sets up connection.
    Creates a SQLAlchemy "session" object to be used for queries.
    Returns (bool): True|False for status of operation
    """
    tag = "%s.initialize_storage" % (self.cname)
    status = False
    port = db_args.get('port', database.PORT)
    self.log("DEBUG", "%s: Connecting to database..." % (tag))
    engine = database.create_db_engine(db_args)
    ## store the db-engine as object-attr for full resource_cleanup()
    self.db = engine
    ## Create database session and store as object-attr
    self.log("DEBUG", "%s: Creating DB session..." % (tag))
    try:
        self.session = database.create_session(engine)
        status = True
    except Exception as e:
        raise
    self.session.close()
    ## Perform a count() records query to validate the connection and verify data exists
    count = self.session.query(database.CveRecord.id).count()
    if not count:
        raise CveStorageError("%s: Zero CVE records stored" % (tag))
    self.log("INFO", "%s: CVE records in database: (%d)" % (tag, count))
    return status
def open(self, door_name, data):
    card_uid = data.get('card', '')
    print('open with Card: "{}"'.format(card_uid))
    s = db.create_session(config.db)
    try:
        card = s.query(
            db.Card).filter(db.Card.uid == card_uid.upper()).one()
    except exc.NoResultFound:
        return self.fail_open(s, card_uid, door_name)
    if not card.allow_entry or card.expiry_date < date.today():
        return self.fail_open(s, card_uid, door_name)
    if self.status_manager.open:
        return self.grant_open(s, card_uid, door_name)
    if card.allow_unlock:
        pin = data.get('pin', 'no pin')
        sneaky = False
        if pin.startswith('#'):
            sneaky = True
            pin = pin[1:]
        if not card.allow_sneaky:
            sneaky = False
        if card.always_sneaky:
            sneaky = True
        if card.pin == pin:
            return self.grant_open(s, card_uid, door_name, sneaky)
        else:
            self.alarm_door(door_name, 'pin')
            return
    return self.fail_open(s, card_uid, door_name)
def display_registered_sensors(sensor_):
    sensors = dict()
    with create_session() as session:
        for sensor in session.query(sensor_).order_by(
                sensor_.place_id.asc()).all():
            if sensor:
                sensors[sensor.id] = sensor
    return sensors
def filter_sensors(filter_form, sensor_):
    sensors = dict()
    with create_session() as session:
        for sensor in session.query(sensor_).filter_by(
                place_id=filter_form.place_name.data).all():
            if sensor:
                sensors[sensor.id] = sensor
    return sensors
def save_expense(expense_map):
    session = database.create_session()
    id = session.execute('SELECT id from expenses')
    session.execute(
        'INSERT INTO expense(username, category, description, price, record_ts) VALUES(%s, %s, %s, %s, %s);',
        (expense_map['username'], expense_map['category'], expense_map['description'],
         expense_map['price'], expense_map['timestamp']))
def get_current_user_role():
    with create_session() as session:
        try:
            get_role = session.query(
                User.role).filter_by(email=current_user.email).first()
            return "".join(get_role)
        except AttributeError:
            return None
def before_request():
    g.menu_entries = {}
    g.db = create_session(app.config['DATABASE_URI'])
    g.mailer = Mailer(
        app.config.get('EMAIL_DOMAIN', ''),
        app.config.get('EMAIL_API_KEY', ''),
        'CCExtractor.org CI Platform'
    )
    g.version = "0.1"
    g.log = log
    g.github = get_github_config(app.config)
def get_people_on_site(record, user_):
    working_people = []
    with create_session() as session:
        for human in session.query(record.user_id,
                                   func.count(record.user_id)).group_by(record.user_id).filter_by(
                                       is_registered=True):
            if not human[1] % 2 == 0:
                for user in session.query(user_).filter_by(card_id=human[0]).all():
                    working_people.append("{} {}".format(user.name, user.surname))
    return working_people
def App_request(environ, start_response):
    try:
        _, door, req = environ['PATH_INFO'].split('/')
        request_length = int(environ.get('CONTENT_LENGTH', 0))
        body = environ['wsgi.input'].read(request_length).decode('UTF-8')
        request_body = json.loads(body)
    except ValueError as e:
        return bad_req(start_response)
    try:
        card_uid = request_body['card']
    except KeyError:
        return bad_req(start_response)
    pin = request_body.get('pin', '')
    new_pin = request_body.get('new_pin', '')
    s = db.create_session(config.db)
    if req == 'close':
        if request_body.get('write_log', False):
            log_success(s, card_uid, 'close', door)
        return ok(start_response, 'close')
    if req == 'open':
        try:
            card = s.query(db.Card).filter(db.Card.uid == card_uid.upper()).one()
        except exc.NoResultFound:
            log_failure(s, card_uid, 'open', door)
            return ok(start_response, 'deny')
        if card.access_level < 5 or card.expiry_date < date.today():
            log_failure(s, card_uid, 'open', door)
            return ok(start_response, 'deny')
        if get_status(s).req_type == 'open':
            log_success(s, card_uid, 'open', door)
            return ok(start_response, 'open')
        if card.access_level >= 10:
            if card.pin == pin:
                log_success(s, card_uid, 'open', door)
                return ok(start_response, 'open')
            else:
                return ok(start_response, 'require_pin')
        log_failure(s, card_uid, 'open', door)
        return ok(start_response, 'deny')
    if req == 'change_pin':
        try:
            card = s.query(db.Card).filter(db.Card.uid == card_uid.upper()).one()
        except exc.NoResultFound:
            return bad_req(start_response, {'message': 'Unknown uid', 'uid': card_uid.upper()})
        if card.pin != pin:
            return bad_req(start_response, {'message': 'wrong pin', 'uid': card_uid.upper()})
        if len(new_pin) != len(pin):
            return bad_req(start_response, {'message': 'wrong new pin length', 'uid': card_uid.upper()})
        card.pin = new_pin
        s.merge(card)
        s.commit()
        card = s.query(db.Card).filter(db.Card.uid == card_uid.upper()).one()
        return ok(start_response, 'ack')
    return bad_req(start_response, {'message': 'unknown request'})
def run():
    from mod_home.models import CCExtractorVersion, GeneralData
    from mod_regression.models import Category, RegressionTest, InputType, OutputType, RegressionTestOutput
    from mod_sample.models import Sample
    from mod_upload.models import Upload
    from mod_auth.models import User
    from database import create_session

    db = create_session(sys.argv[1])
    entries = []
    categories = [
        Category('Broken', 'Samples that are broken'),
        Category('DVB', 'Samples that contain DVB subtitles'),
        Category('DVD', 'Samples that contain DVD subtitles'),
        Category('MP4', 'Samples that are stored in the MP4 format'),
        Category('General', 'General regression samples')
    ]
    entries.extend(categories)
    samples = [
        Sample('sample1', 'ts', 'sample1'),
        Sample('sample2', 'ts', 'sample2')
    ]
    entries.extend(samples)
    cc_version = CCExtractorVersion(
        '0.84', '2016-12-16T00:00:00Z', '77da2dc873cc25dbf606a3b04172aa9fb1370f32')
    entries.append(cc_version)
    regression_tests = [
        RegressionTest(1, '-autoprogram -out=ttxt -latin1', InputType.file, OutputType.file, 3, 10),
        RegressionTest(2, '-autoprogram -out=ttxt -latin1 -ucla', InputType.file, OutputType.file, 1, 10)
    ]
    entries.extend(regression_tests)
    gen_data = GeneralData('last_commit', '71dffd6eb30c1f4b5cf800307de845072ce33262')
    entries.append(gen_data)
    regression_test_output = [
        RegressionTestOutput(1, "test1", "srt", "test1.srt"),
        RegressionTestOutput(2, "test2", "srt", "test2.srt")
    ]
    entries.extend(regression_test_output)
    for entry in entries:
        try:
            db.add(entry)
            db.commit()
        except IntegrityError:
            print("Entry already exists!", entry, flush=True)
            db.rollback()
def getAll(**kwargs):
    session = create_session()
    token = kwargs.get("token_info").get("token")
    user_q = decode_auth_token(session, token)
    if not user_q.is_admin:
        return "No autorizado", 401
    user_schema = UserSchema(exclude=["password", "id"])
    user_q_list = get_all_users_by_kwargs(session)
    data = user_schema.dump(user_q_list, many=True)
    return data, 200
def tearDown(self):
    from database import Base
    try:
        db = create_session(self.app.config['DATABASE_URI'], drop_tables=False)
        db_engine = create_engine(self.app.config['DATABASE_URI'], convert_unicode=True)
        Base.metadata.drop_all(bind=db_engine)
    finally:
        db.remove()
def delete(**kwargs):
    session = create_session()
    token = kwargs.get("token_info").get("token")
    user_q = decode_auth_token(session, token)
    data = {'id': user_q.id, 'online': False}
    logged = update_user(session, **data)
    session.close()
    if not logged:
        return "Bad Request", 400
    return "", 204
def create_roles(self):
    session = create_session()
    roles = self.data.get("roles")
    from api.services.roles_service import get_role_by_name, insert_role
    for role in roles:
        name = role['name']
        role_q = get_role_by_name(session, name)
        if role_q is None:
            insert_role(session, role)
    session.close()
def render_survey(id, survey_id=0):
    # check if the id is a valid id if invite only
    # if there's no session with this id create it
    try:
        if survey_id != 0:
            db.create_session(id, survey_id)
    except:
        pass
    # get the session
    try:
        session = db.get_session(id)
    except:
        return render("/templates", "error.html", errormessage="Ongeldige toegangscode")
    # load the current question
    question = db.get_question(session["survey_id"], session["question_ordernumber"])
    # get options if needed
    options = {}
    if question["type"] in {"1", "M"}:
        options = db.get_options(question["id"])
    # get the template for the question type
    template = get_template(question["type"])
    # load response of question if any
    response = db.get_response(id, question["id"])
    if question["type"] == "M":
        if response != None and len(response) > 0:
            response = map(int, response.split(","))
        else:
            response = []
    # render question
    return render("/templates", template, session_id=id, question=question, options=options, response=response)
def ok(self):
    from Director.Director import InsertDirectorAPI
    from database import create_debug_engine, create_session
    de = create_debug_engine(True)
    session = create_session(de)
    self.api = InsertDirectorAPI()
    self.data = self.line()
    if self.data[0] == "":
        return self.view.close()
    session.add(self.api.insertUser(self.data[0], self.data[1], self.data[2], self.data[3], self.data[4]))
    session.commit()
    return self.view.close()
def login():
    form = LoginForm()
    if form.validate_on_submit():
        with create_session() as session:
            user = session.query(User).filter_by(email=form.email.data).first()
            if user and check_password_hash(user.password, form.password.data):
                login_user(user)
                flash("You're now logged in", "success")
                return redirect(url_for('home'))
            else:
                flash("Invalid email or password", "error")
    return render_template("login.html", form=form)
def close(self, door_name, data):
    card_uid = data.get('card', '')
    self.status_manager.open = False
    self.status_manager.occupied = False
    self.status_manager.public = False
    self.alarm_door(door_name, 'close')
    r = db.Request_Success(card_uid=card_uid, req_type='close', door_name=door_name)
    s = db.create_session(config.db)
    s.add(r)
    s.commit()
def invite_participant(survey_id, participant_id):
    # check if the participant is not already invited
    c = db.query(
        "select count(*) as c from survey_participant where survey_id = %(survey_id)s and participant_id = %(participant_id)s",
        {
            'survey_id': survey_id,
            'participant_id': participant_id
        })[0]["c"]
    # if not invited, then make a session, send a mail and register as invited
    if c == 0:
        # make session
        session_id = str(uuid4())
        db.create_session(session_id, survey_id)
        # register invited
        db.update(
            "insert into survey_participant (survey_id, participant_id) values (%(survey_id)s, %(participant_id)s)",
            {
                'survey_id': survey_id,
                'participant_id': participant_id
            })
        return session_id
def test_honeypot_deployment(self):
    # create service, profile, deployment
    try:
        db = create_session(self.app.config['DATABASE_URI'], drop_tables=False)
        profile = Profile(name='test-profile', description="test")
        db.add(profile)
        db.commit()
        profile_id = profile.id
    finally:
        db.remove()
    name = 'test_deployment'
    with self.app.test_client() as client:
        data = dict(profile_id=profile_id,
                    name='test_deployment',
                    rpi_model='one',
                    server_ip='127.0.0.1',
                    interface='eth0',
                    debug=True,
                    hostname='admin',
                    rootpw='123',
                    collector_type='tcp',
                    wlan_configuration='')
        response = client.post('manage', data=data, follow_redirects=False,
                               headers=[('X-Requested-With', 'XMLHttpRequest')])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.get_json()['status'], 'success')
    # check the deployment is created in the database
    try:
        db = create_session(self.app.config['DATABASE_URI'], drop_tables=False)
        deployment_instance = db.query(Deployment.name).first()
        deployment_name = deployment_instance.name
    finally:
        db.remove()
    self.assertEqual(deployment_name, name)
def cron():
    from mod_ci.controllers import start_platform
    from run import config, log
    from database import create_session
    from github import GitHub

    log.info('Run the cron for kicking off CI platform(s).')
    # Create session
    db = create_session(config['DATABASE_URI'])
    gh = GitHub(access_token=config['GITHUB_TOKEN'])
    repository = gh.repos(config['GITHUB_OWNER'])(config['GITHUB_REPOSITORY'])
    start_platform(db, repository)
def process_record(record, user, sensor, request, response, if_sensor_registered, if_uid_registered):
    data = request.json
    with create_session() as session:
        record = record(gateway_id=data["gateway_id"],
                        sensor_id=data["sensor_id"],
                        user_id=data["user_id"],
                        is_registered=if_sensor_registered(data, session, sensor),
                        in_use=if_uid_registered(data, session, user),
                        timestamp=datetime.now())
        session.add(record)
    return response(status=201)
def accept(self):
    from Director.Director import InsertDirectorAPI
    from database import create_debug_engine, create_session
    de = create_debug_engine(True)
    session = create_session(de)
    self.api = InsertDirectorAPI()
    self.data = self.line()
    if self.data[0] == "":
        return self.view.ui.label12.setText("Пожалуйста, заполните все поля!!!")
    self.n = self.api.insertUser(self.data[0], self.data[1], self.data[2], self.data[3], self.data[4])
    if type(self.n) == str:
        return self.view.ui.label12.setText(self.n)
    session.add(self.n)
    return session.commit()
def run():
    from database import create_session
    from mod_auth.models import User, Role
    from mod_auth.models import Page

    db = create_session(sys.argv[1])
    # Create pages if not existing
    pages = Page.query.all()
    if len(pages) == 0:
        page_entries = {
            'report.dashboard': 'Dashboard',
            'config.notifications': 'Notification services',
            'config.data_processing': 'Data processing',
            'config.services': 'Honeypot services',
            'auth.users': 'User manager',
            'auth.access': 'Access manager',
            'honeypot.profiles': 'Profile management',
            'honeypot.manage': 'Honeypot management'
        }
        for name, pretty_name in page_entries.iteritems():
            page = Page(name, pretty_name)
            db.add(page)
        db.commit()
        # Add support pages
        db.add(Page('support.about', 'About', True))
        db.add(Page('support.support', 'Support', True))
        db.commit()
    # Create admin role, or check if it already exists
    existing = Role.query.filter(Role.is_admin).first()
    if existing is None:
        role = Role("Admin")
        db.add(role)
        db.commit()
        existing = role
    else:
        # Check if there's at least one admin user
        admin = User.query.filter(User.role_id == existing.id).first()
        if admin is not None:
            print("Admin already exists: %s" % admin.name)
            return
    user = User(existing.id, sys.argv[2], sys.argv[3], User.generate_hash(sys.argv[4]))
    db.add(user)
    db.commit()
    print("Admin user created with name: %s" % user.name)
def run():
    from mod_home.models import CCExtractorVersion, GeneralData
    from mod_regression.models import Category, RegressionTest, InputType, OutputType
    from mod_sample.models import Sample
    from mod_upload.models import Upload
    from mod_auth.models import User
    from database import create_session

    db = create_session(sys.argv[1])
    categories = [
        Category('Broken', 'Samples that are broken'),
        Category('DVB', 'Samples that contain DVB subtitles'),
        Category('DVD', 'Samples that contain DVD subtitles'),
        Category('MP4', 'Samples that are stored in the MP4 format'),
        Category('General', 'General regression samples')
    ]
    db.add_all(categories)
    db.commit()
    samples = [
        Sample('sample1', 'ts', 'sample1'),
        Sample('sample2', 'ts', 'sample2')
    ]
    db.add_all(samples)
    db.commit()
    cc_version = CCExtractorVersion('0.84', '2016-12-16', '77da2dc873cc25dbf606a3b04172aa9fb1370f32')
    db.add(cc_version)
    db.commit()
    regression_tests = [
        RegressionTest(1, '-autoprogram -out=ttxt -latin1', InputType.file, OutputType.file, 3, 10),
        RegressionTest(2, '-autoprogram -out=ttxt -latin1 -ucla', InputType.file, OutputType.file, 1, 10)
    ]
    db.add_all(regression_tests)
    db.commit()
    gen_data = GeneralData('last_commit', '71dffd6eb30c1f4b5cf800307de845072ce33262')
    db.add(gen_data)
    db.commit()
def setUp(self):
    self.app.preprocess_request()
    g.db = create_session(
        self.app.config['DATABASE_URI'], drop_tables=True)
    # enable Foreign keys for unit tests
    g.db.execute('pragma foreign_keys=on')

    general_data = [
        GeneralData('last_commit', '1978060bf7d2edd119736ba3ba88341f3bec3323'),
        GeneralData('fetch_commit_' + TestPlatform.linux.value, '1978060bf7d2edd119736ba3ba88341f3bec3323'),
        GeneralData('fetch_commit_' + TestPlatform.windows.value, '1978060bf7d2edd119736ba3ba88341f3bec3323')
    ]
    g.db.add_all(general_data)

    self.ccextractor_version = CCExtractorVersion(
        '1.2.3', '2013-02-27T19:35:32Z', '1978060bf7d2edd119736ba3ba88341f3bec3323')
    g.db.add(self.ccextractor_version)

    fork = Fork('https://github.com/{user}/{repo}.git'.format(user=g.github['repository_owner'],
                                                              repo=g.github['repository']))
    g.db.add(fork)
    g.db.commit()

    dummy_user = User(signup_information['existing_user_name'],
                      signup_information['existing_user_role'],
                      signup_information['existing_user_email'],
                      signup_information['existing_user_pwd'])
    g.db.add(dummy_user)
    g.db.commit()

    test = [
        Test(TestPlatform.linux, TestType.pull_request, 1, 'master',
             '1978060bf7d2edd119736ba3ba88341f3bec3323', 1),
        Test(TestPlatform.linux, TestType.pull_request, 1, 'master', 'abcdefgh', 1)
    ]
    g.db.add_all(test)
    g.db.commit()

    categories = [
        Category('Broken', 'Samples that are broken'),
        Category('DVB', 'Samples that contain DVB subtitles'),
        Category('DVD', 'Samples that contain DVD subtitles'),
        Category('MP4', 'Samples that are stored in the MP4 format'),
        Category('General', 'General regression samples')
    ]
    g.db.add_all(categories)
    g.db.commit()

    samples = [
        Sample('sample1', 'ts', 'sample1'),
        Sample('sample2', 'ts', 'sample2')
    ]
    g.db.add_all(samples)
    g.db.commit()

    upload = [
        Upload(1, 1, 1, Platform.windows),
        Upload(1, 2, 1, Platform.linux)
    ]
    g.db.add_all(upload)
    g.db.commit()

    regression_tests = [
        RegressionTest(1, '-autoprogram -out=ttxt -latin1 -2', InputType.file, OutputType.file, 3, 10),
        RegressionTest(2, '-autoprogram -out=ttxt -latin1 -ucla', InputType.file, OutputType.file, 1, 10)
    ]
    g.db.add_all(regression_tests)
    g.db.commit()

    categories[0].regression_tests.append(regression_tests[0])
    categories[2].regression_tests.append(regression_tests[1])
    regression_test_outputs = [
        RegressionTestOutput(1, 'sample_out1', '.srt', ''),
        RegressionTestOutput(2, 'sample_out2', '.srt', '')
    ]
    g.db.add_all(regression_test_outputs)
    g.db.commit()

    test_result_progress = [
        TestProgress(1, TestStatus.preparation, "Test 1 preparation"),
        TestProgress(1, TestStatus.building, "Test 1 building"),
        TestProgress(1, TestStatus.testing, "Test 1 testing"),
        TestProgress(1, TestStatus.completed, "Test 1 completed"),
        TestProgress(2, TestStatus.preparation, "Test 2 preparation"),
        TestProgress(2, TestStatus.building, "Test 2 building"),
        TestProgress(2, TestStatus.testing, "Test 2 testing"),
        TestProgress(2, TestStatus.completed, "Test 2 completed")
    ]
    g.db.add_all(test_result_progress)
    g.db.commit()

    test_results = [
        TestResult(1, 1, 200, 0, 0),
        TestResult(1, 2, 601, 0, 0),
        TestResult(2, 1, 200, 200, 0),
        TestResult(2, 2, 601, 0, 0)
    ]
    g.db.add_all(test_results)
    g.db.commit()

    test_result_files = [
        TestResultFile(1, 1, 1, 'sample_out1'),
        TestResultFile(1, 2, 2, 'sample_out2'),
        TestResultFile(2, 1, 1, 'sample_out1'),
        TestResultFile(2, 2, 2, 'sample_out2', 'out2')
    ]
    g.db.add_all(test_result_files)
    g.db.commit()

    forbidden_mime = ForbiddenMimeType('application/javascript')
    forbidden_ext = [
        ForbiddenExtension('js'),
        ForbiddenExtension('com')
    ]
    g.db.add(forbidden_mime)
    g.db.add_all(forbidden_ext)
    g.db.commit()
        print('')
        print('podio: {}'.format(d[k][0]))
        print('local: {}'.format(d[k][1]))
        r = input('write from podio to db? (y/N)')
        if r.upper() == 'Y':
            print('changing db')
            if not d[k][0]:
                # Card is not listed in podio, remove from db
                s.delete(d[k][1])
            else:
                if d[k][1]:
                    d[k][0].pin = d[k][1].pin  # keep pin
                # if d[k][0].member:
                #     session.merge(d[k][0].member)  # add member from podio
                #     d[k][0].member = d[k][0].member.item_id
                session.merge(d[k][0])
            session.commit()
            podio_client.Comment.create(
                'item', d[k][0].item_id,
                {'value': 'Das Access Level wurde in der Schließsystemdatenbank aktualisiert.'},
                silent=False, hook=False)


if __name__ == '__main__':
    from database import create_session
    import sys

    podio_client = api.OAuthAppClient(config.podio_client_id, config.podio_client_secret,
                                      config.podio_app_id_nfc, config.podio_app_key_nfc)
    s = create_session(config.db)
    d, e = create_diff(s, podio_client, config.podio_app_id_nfc)
    if e:
        for x in e:
            print(x)
    if not e or '-f' in sys.argv:
        print('------------------')
        interactive_merge(s, d, podio_client)
import os

import ConfigParser
import flask
from flask import jsonify

import database

APP = flask.Flask(__name__)
SESSION = None

config = ConfigParser.SafeConfigParser()
config.read(['./data/darkweb.cfg', '/etc/darkserver/darkweb.cfg'])
args = {key: val for key, val in config.items('database')} if config.has_section('database') else {}
if args.get('engine', '') == 'postgres':
    db_url = "postgresql://{0}:{1}@{2}/{3}".format(args['username'], args['password'], args['host'], args['dbname'])
    SESSION = database.create_session(db_url)


@APP.route('/buildids/<buildid>')
def buildids(buildid):
    "Returns the details for the given buildid"
    ids = buildid.split(",")
    rows = SESSION.query(database.Buildid).filter(database.Buildid.build_id.in_(ids))
    result = []
    for row in rows:
        data = {}
        data['buildid'] = row.build_id
        data['elf'] = os.path.join(row.instpath, row.elfname)
        data['rpm'] = row.rpm_name
        data['distro'] = row.distro
        data['url'] = row.rpm_url
#!/usr/bin/env python3
import config
import app_status
import database as db

s = db.create_session(config.db)
r = app_status.send_status(s, 'doorsbeat')
def before_request():
    g.server_name = app.config.get('INSTANCE_NAME', '')
    g.menu_entries = {}
    g.db = create_session(app.config['DATABASE_URI'])
    g.version = "0.6"
def __init__(self, connection_string):
    self.session = db.create_session(connection_string)
#!/usr/bin/python
import sys
from os import path

# Need to append server root path to ensure we can import the necessary files.
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))


def process(database, file_to_process):
    from mod_upload.controllers import upload_ftp
    from run import log
    log.debug("Calling the FTP upload method from the controller!")
    upload_ftp(database, file_to_process)


if __name__ == '__main__':
    from database import create_session
    from run import config, log

    db = create_session(config['DATABASE_URI'])
    file_path = str(sys.argv[1])
    process(db, file_path)
def App_status(environ, start_response):
    s = db.create_session(config.db)
    start_response("200 OK", [])
    return [get_status(s).encode('UTF-8')]
class Keyword(Base):
    __tablename__ = 'keywords'
    id = Column(Integer, primary_key=True)
    keyword = Column(String(50), nullable=False, unique=True)

    def __init__(self, keyword):
        self.keyword = keyword

BlogPost.author = relationship('User', backref=backref('posts', lazy='dynamic'))
'''

create_schema()
session = create_session()

manager = Position(name='manager', description='manager')
senior_manager = Position(name='senior manager', description='senior manager')
session.add(manager)
session.add(senior_manager)

group1 = Group(name='group1', description='group1')
group2 = Group(name='group2', description='group2')

department1 = Department(name='accounting', description='department of accounting')
department2 = Department(name='marine', description='school of marine science')