def __init__(self, host='localhost', ships=None):
    """Connect to the ReJSON server at *host* and register shippers.

    Parameters
    ----------
    host : str
        Redis/ReJSON hostname.
    ships : list or None
        Shipper objects; defaults to a fresh ``[NumpyShip()]`` per instance.
    """
    # Avoid a mutable default argument: with ``ships=[NumpyShip()]`` in the
    # signature, one list and ONE NumpyShip instance would be shared by
    # every instance constructed with the default.
    if ships is None:
        ships = [NumpyShip()]
    self.hostname = host
    self.ships = ships
    # Decoded client for normal use; raw client for binary payloads.
    self.client = rejson.Client(host=host, decode_responses=True)
    self.client_no_decode = rejson.Client(host=host)
    self.metadata_listener = MetadataListener(self)
    self.INTERFACE_LOCK = Lock()
    # Map each shipper's label to the shipper for O(1) lookup.
    self.label_to_shipper = {sh.get_label(): sh for sh in self.ships}
    self.shipper_labels = [sh.get_label() for sh in ships]
def __init__(self, interface):
    """Listen for Redis keyspace events on behalf of *interface*.

    Subscribes to every key-event pattern in database 0 and keeps a
    registry of per-channel listeners.  NOTE(review): the trailing
    ``super().__init__()`` suggests this is a Thread subclass -- confirm
    against the class header (outside this view).
    """
    # Dedicated connection: pub/sub monopolises a connection, so it
    # cannot share the interface's client.
    self.client = rejson.Client(host=interface.hostname)
    self.pubsub = self.client.pubsub()
    # '__keyspace@0__:*' -> notified about every key change in db 0.
    self.pubsub.psubscribe(['__keyspace@0__:*'])
    # Channel name -> registered listener.
    self.listeners = {}
    super().__init__()
def __init__(self, interface, channel_name, callback_function, kwargs):
    """Subscribe to *channel_name* and deliver messages to a callback.

    Parameters
    ----------
    interface : object
        Provides the ``hostname`` of the Redis server to connect to.
    channel_name : str
        Pub/sub channel pattern to subscribe to.
    callback_function : callable
        Invoked for each received message.
    kwargs : dict
        Extra keyword arguments forwarded to the callback.
    """
    # Cooperative initialisation instead of legacy ``Thread.__init__(self)``.
    super().__init__()
    # Pub/sub needs its own connection (it blocks the connection).
    self.client = rejson.Client(host=interface.hostname)
    self.pubsub = self.client.pubsub()
    self.pubsub.psubscribe([channel_name])
    self.callback_function = callback_function
    self.kwargs = kwargs
    # Presumably used to skip the initial subscription-confirmation
    # message -- confirm against the run() implementation.
    self.first_item_seen = False
def post(self) -> Response:
    """Return all repositories the user can access in an organization.

    Expects ``org_id`` and ``jwt`` request values; the JWT carries the
    GitHub access token.  Each repo is enriched with cached processing
    results (``room_type``, ``violations``) from Redis.

    Returns
    -------
    Response
        JSON array of repository summaries.
    """
    # Get access_token from decoded JWT passed in from frontend.
    org_id = request.values["org_id"]
    # NOTE(review): hard-coded signing secret -- should come from config.
    decoded_jwt = decode(request.values["jwt"], "kondo-secret", algorithms=["HS256"])
    req = requests.get(
        "https://api.github.com/user/installations/" + org_id + "/repositories",
        headers={
            "Accept": "application/vnd.github.machine-man-preview+json",
            "Authorization": "token " + decoded_jwt["access_token"],
        },
    )
    # Surface GitHub API failures instead of crashing on a missing key below.
    if not req.ok:
        return Response(req.text, status=req.status_code, mimetype="application/json")

    # Connect to Redis.
    redis_host = current_app.config["REDIS_HOST"]
    r = rejson.Client(host=redis_host, port=6379, db=0, decode_responses=True)

    repos_json = req.json()["repositories"]
    repos = []
    for repo in repos_json:
        repo_id: int = repo["id"]
        cached = r.jsonget(repo_id)
        if cached is None:
            # Repo not processed/cached yet -- skip it rather than let
            # json.loads(None) raise a TypeError and 500 the request.
            logger.debug("No cached info for repo id %s; skipping", repo_id)
            continue
        processed_repo = json.loads(cached)
        logger.debug("JSON repo info retrieved from Redis: " + str(processed_repo))
        repos.append({
            "name": repo["name"],
            "id": str(repo["id"]),
            "url": repo["html_url"],
            "clone_url": repo["clone_url"],
            "room_type": processed_repo["room_type"],
            "violations": processed_repo["violations"],
        })
    return Response(json.dumps(repos), status=200, mimetype="application/json")
def process_repositories():
    """Primary entrypoint for the repo_processor.

    Discovers where kondo has been installed, clones each installation's
    repositories, detects the repo type, validates it with the room
    engine, and updates the Redis cache for the backend API.  This
    function only connects the other helpers; it should contain no
    additional logic of its own.
    """
    # Initialize Room Engine: load all the rooms into memory.
    rooms = room_engine.initialize_rooms()

    # Connect to Redis.
    redis_host = app.config["REDIS_HOST"]
    r = rejson.Client(host=redis_host, port=6379)
    logger.info("Connected to Redis server: " + redis_host)

    # Process every repo of every installation.
    installations = get_installations()
    for install in installations:
        auth_token = get_access_token(install["id"])
        # Fixed invalid annotation ``[Repo]`` (a list literal, not a type).
        repositories = get_installation_repositories(auth_token)  # -> list of Repo
        for repo in repositories:
            # Clone the repository into the local cache directory.
            target_dir = app.config["CACHE_DIRECTORY"] + "/" + repo.name
            logger.debug("Cloning " + repo.name)
            git_tools.clone_repository(
                clone_url=repo.clone_url,
                username="******",
                password=auth_token,
                target_dir=target_dir,
            )

            # Detect repository type.
            room_type = room_engine.detect_repository_type(path=target_dir, rooms=rooms)
            logger.debug(repo.name + " detected as " + room_type)
            repo.room_type = room_type

            # Validate repository using room engine.
            settings = {
                "CHANGELOG_DISABLED": False,
                "LICENSE_DISABLED": False,
                "PRECOMMIT_HOOKS_DISABLED": False,
                "GLOBAL_JENKINSFILE_ENABLED": True,
            }
            # Check for violations unless room_type is unknown.
            if room_type == "unknown":
                logger.debug("Unable to validate repo: " + repo.name + ", room_type not detected")
                violations = "False"
            else:
                violations = room_engine.get_violations(rooms[room_type], target_dir, settings=settings)
            logger.debug("Validation output: " + str(violations))
            repo.violations = violations

            # Update Redis.  The value is a JSON *string* stored under the
            # root path (double-encoded) -- the backend API expects exactly
            # this shape, so it is preserved deliberately.
            repo_json = json.dumps(repo.to_json())
            logger.debug("Updated repo info stored in redis: " + str(repo_json))
            repo_id: int = repo.id
            r.jsonset(repo_id, rejson.Path.rootPath(), repo_json)
def connect():
    """Create the module-level ReJSON client for the configured host/port.

    Only ``red`` is assigned here; ``redis_host`` and ``redis_port`` are
    merely read, so they need no ``global`` declaration.
    """
    global red
    red = rejson.Client(host=redis_host, port=redis_port, decode_responses=True)
def get_db(conf):
    """Build a decoding ReJSON client from a config mapping.

    *conf* must provide ``'host'``, ``'port'`` and ``'id'`` (database
    index); port and id may be strings and are coerced to int.
    """
    host = conf['host']
    port = int(conf['port'])
    db_index = int(conf['id'])
    return rejson.Client(host=host, port=port, db=db_index, decode_responses=True)
import rejson import time import os r = rejson.Client(host=os.environ['REDIS_HOST'], password=os.environ['REDIS_PASS'], port=os.environ['REDIS_PORT'], db=0) while True: pipe = r.pipeline(transaction=False) path = "path:sonde:foo" pipe.jsonset(path, rejson.Path.rootPath(), { "type" : "Feature", "geometry" : { "type": "LineString", "coordinates": [], }, "properties": { "coordinateProperties": [] } }, nx=True) pipe.jsonarrappend(path, rejson.Path(".geometry.coordinates"), [47, 15, 920]) per_sample_props = { "type":"Feature", "geometry":{ "type":"Point", "coordinates":[26.2153,60.34085,417.0] }, "properties":{ "temp":3.6, "serial":"S0730589",
from flask_sqlalchemy import SQLAlchemy # class CustomApi(flask_restful.Api): # def handle_error(self, e): # print (make_response(jsonify({"error":str(e)}),500)) # return 'ok' import warnings warnings.filterwarnings("ignore") REDIS_URL = "redis://localhost:6379/0" pool = redis.ConnectionPool(host='localhost', port=6379, db=0) redis_db = redis.Redis(connection_pool=pool) rj = rejson.Client(host='localhost', port=6379, decode_responses=True) app = Flask(__name__) app.config.from_pyfile('config.py') db = SQLAlchemy(app) mail = Mail(app) jwt = JWTManager(app) @jwt.token_in_blacklist_loader def check_if_token_in_blacklist(decrypted_token): jti = decrypted_token['jti'] return model.Revoked.is_jti_blacklisted(jti)
class TestJobsAPI(unittest.TestCase):
    """Integration tests for the jobs HTTP API.

    Requires a live API server on 127.0.0.1:5000 and a Redis instance on
    127.0.0.1:6379 -- these are NOT unit tests.
    """

    # Base URL of the running API under test.
    url = 'http://127.0.0.1:5000/'
    # Direct Redis access, used only for cleanup of hold keys.
    redis = rejson.Client(host='127.0.0.1', port=6379, decode_responses=True)

    def test_post_and_delete_job_valid(self):
        """Creating a job returns 201 + job_id; deleting it returns 200."""
        r = post(f'{self.url}jobs', json=['test_item1', 'test_item2', 'test_item3'])
        self.assertEqual(r.status_code, 201)
        self.assertIsInstance(r.json(), dict)
        self.assertEqual(r.json()['status'], 'ok')
        self.assertIsInstance(r.json()['job_id'], str)
        # Clean up the job we just created.
        d = delete(f'{self.url}jobs/{r.json()["job_id"]}')
        self.assertEqual(d.status_code, 200)
        self.assertIsInstance(d.json(), dict)
        self.assertEqual(d.json()['status'], 'ok')

    def test_post_job_invalid(self):
        """Empty list or non-array payloads are rejected with 400."""
        r = post(f'{self.url}jobs', json=[])
        self.assertEqual(r.status_code, 400)
        self.assertIsInstance(r.json(), dict)
        r = post(f'{self.url}jobs', json='its_not_array')
        self.assertEqual(r.status_code, 400)

    def test_get_job_valid(self):
        """An existing job can be fetched, then deleted."""
        r = post(f'{self.url}jobs', json=['test_item1', 'test_item2', 'test_item3'])
        g = get(f'{self.url}jobs/{r.json()["job_id"]}')
        self.assertEqual(g.status_code, 200)
        self.assertEqual(g.json()['status'], 'ok')
        d = delete(f'{self.url}jobs/{r.json()["job_id"]}')
        self.assertEqual(d.status_code, 200)
        self.assertIsInstance(d.json(), dict)
        self.assertEqual(d.json()['status'], 'ok')

    def test_get_job_invalid(self):
        """Fetching a non-existent job returns 400 with an error status."""
        g = get(f'{self.url}jobs/this_job_is_not_exists')
        self.assertEqual(g.status_code, 400)
        self.assertEqual(g.json()['status'], 'error')

    def test_delete_job_invalid(self):
        """Deleting a non-existent job returns 400 with an error status."""
        d = delete(f'{self.url}jobs/this_job_is_not_exists')
        self.assertEqual(d.status_code, 400)
        self.assertIsInstance(d.json(), dict)
        self.assertEqual(d.json()['status'], 'error')

    def test_get_items_valid(self):
        """All posted items are listed under the job's /items endpoint."""
        r = post(f'{self.url}jobs', json=['test_item1', 'test_item2', 'test_item3'])
        self.assertEqual(r.json()['status'], 'ok')
        it = get(f'{self.url}jobs/{r.json()["job_id"]}/items')
        self.assertEqual(it.status_code, 200)
        self.assertIn('items', it.json())
        self.assertIn('test_item1', it.json()['items'])
        self.assertIn('test_item2', it.json()['items'])
        self.assertIn('test_item3', it.json()['items'])
        self.assertEqual(len(it.json()['items']), 3)
        d = delete(f'{self.url}jobs/{r.json()["job_id"]}')
        self.assertEqual(d.json()['status'], 'ok')

    def test_get_items_invalid(self):
        """/items for an unknown job returns 400 and no 'items' key."""
        r = post(f'{self.url}jobs', json=['test_item1', 'test_item2', 'test_item3'])
        self.assertEqual(r.json()['status'], 'ok')
        it = get(f'{self.url}jobs/this_job_is_not_exists/items')
        self.assertEqual(it.status_code, 400)
        self.assertEqual(it.json()['status'], 'error')
        self.assertNotIn('items', it.json())
        d = delete(f'{self.url}jobs/{r.json()["job_id"]}')
        self.assertEqual(d.json()['status'], 'ok')

    def test_get_next_valid(self):
        """/next removes the returned item from the active item set."""
        r = post(f'{self.url}jobs', json=['test_item1', 'test_item2', 'test_item3'])
        self.assertEqual(r.json()['status'], 'ok')
        active_it = get(f'{self.url}jobs/{r.json()["job_id"]}/items?active=true')
        self.assertEqual(active_it.status_code, 200)
        self.assertIn('test_item1', active_it.json()['items'])
        self.assertIn('test_item2', active_it.json()['items'])
        self.assertIn('test_item3', active_it.json()['items'])
        self.assertEqual(len(active_it.json()['items']), 3)
        # Claim the next item, then verify it left the active set.
        next_it = get(f'{self.url}jobs/{r.json()["job_id"]}/next')
        active_it_new = get(f'{self.url}jobs/{r.json()["job_id"]}/items?active=true')
        self.assertNotIn(next_it.json()['item'], active_it_new.json()['items'])
        self.assertEqual(len(active_it_new.json()['items']), 2)
        d = delete(f'{self.url}jobs/{r.json()["job_id"]}')
        self.assertEqual(d.json()['status'], 'ok')
        # Remove the hold key created by /next so later runs start clean.
        self.redis.delete(f'hold_{next_it.json()["item"]}')
def main():
    """Wipe Redis and load the JSON dataset into it."""
    client = rejson.Client(host=redis_host, port=redis_port, decode_responses=True)
    # WARNING: flushall clears EVERY database on the server, not just this one.
    client.flushall()
    records = []
    load_data(records, client)
def make_redis_client(host='localhost', port=6379, db=0):
    """Return plain redis client + rejson client"""
    def _client(decode):
        # One place that fixes host/port/db for both connections.
        return rejson.Client(host=host, port=port, db=db, decode_responses=decode)

    # First client yields raw bytes, second decodes responses to str.
    return _client(False), _client(True)
from tesdaq.listen import TestListener
from tesdaq.task import TaskRestriction
from tesdaq.task.serialize import TDEncoder
import rejson

# Redis connection that serialises task objects with the tesdaq encoder.
r = rejson.Client(encoder=TDEncoder(), decode_responses=True)


def _restriction(channels, timing, trigger):
    # Both tasks share every limit except channels/timing/trigger.
    return TaskRestriction(
        num_tasks=1,
        valid_channels=channels,
        valid_timing=timing,
        valid_trigger=trigger,
        min_sample_rate=10,
        max_sample_rate=10000,
        volt_ranges=[(-5, 5)],
        sr_is_per_chan=True,
    )


analog_in = _restriction(["Dev1", "Chan2"], ["timing"], ["Trigger"])
digital_in = _restriction(["Dev2", "Chan2"], ["timixxxxxn"], ["Trigegigigiigr"])

# Register the listener and block forever servicing events.
testlistener = TestListener("test", r, analog_in=analog_in, digital_in=digital_in)
testlistener.wait()