Example #1
def safecopy(path):
    log = ColoredLogger('safecopy')
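    # Make sure the cache directory exists, then copy the file there under a unique temporary name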
    os.makedirs(get_config()['CACHE_DIR'].encode('utf-8'), exist_ok=True)
    tmp = os.path.basename(path + '.').encode('ascii', 'ignore').decode('ascii')
    cachedir = get_config()['CACHE_DIR']
    fd, destname = mkstemp(prefix=tmp, suffix=os.path.splitext(path)[1],
                           dir=cachedir)
    shutil.copy(path, destname)
    log.debug("Copied %s in %s" % (path, destname))

    return destname
Example #2
    def __init__(self):
        QtCore.QObject.__init__(self)
        self.log.debug("%s instantiating" % self.__class__.__name__)
        self.player = QtPlayer()
        self.audio_source = EventAggregator()
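        # Command sockets are optional and only created when configured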
        if get_config()['SOCKET_TCP']:
            self.tcp_socket = TCPCommandSocket(self,
                                               get_config()['SOCKET_TCP'])
        if get_config()['SOCKET_HTTP']:
            self.http_socket = HTTPCommandSocket(self,
                                                 get_config()['SOCKET_HTTP'])

        self.player.empty.connect(self.on_empty)
Example #3
    def __init__(self):
        QtCore.QObject.__init__(self)
        self.log.debug("%s instantiating" % self.__class__.__name__)
        self.player = QtPlayer()
        self.audio_source = EventAggregator()
        if get_config()['SOCKET_TCP']:
            self.tcp_socket = TCPCommandSocket(
                self, get_config()['SOCKET_TCP'])
        if get_config()['SOCKET_HTTP']:
            self.http_socket = HTTPCommandSocket(
                self, get_config()['SOCKET_HTTP'])

        self.player.empty.connect(self.on_empty)
Example #4
def fix_record_json(file_name, year, exchangeID, review_flag):
    # try:
    #     folder_path = get_config()['excel_path']
    #     json_file_name = create_json_filename_from_excel(file_name)
    #     json_path = folder_path + json_file_name

    #     data = get_data(file_name)
    #     record = data[year]['records'][exchangeID]
    #     record['Processed'] = True
    #     record['Needs_Reviewing'] = review_flag
    #     record['answers'] = data[year]['records']['answers']
    #     data[year]['records'][exchangeID] = record
    #     with open(json_path, 'w') as output_file:
    #         json.dump(data, output_file)
    #         print('fixed!')
    #     return 'Success'
    # except:
    #     return 'Error'

    folder_path = get_config()['excel_path']
    json_file_name = create_json_filename_from_excel(file_name)
    json_path = folder_path + json_file_name

    data = get_data(file_name)
    record = data[year]['records'][exchangeID]
    record['Processed'] = True
    record['Needs_Reviewing'] = review_flag
    data[year]['records'][exchangeID] = record
    with open(json_path, 'w') as output_file:
        json.dump(data, output_file)
        print('fixed!')
    return 'Success'
Example #5
def get_db_conn(collection_name):
	if consul_server == "127.0.0.1" and local_mode == False:
		return None
	else:
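		# Discover the MongoDB service via Consul unless running in local mode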
		if local_mode == False:
			ret = config_manager.discover_service(consul_server, "mongo")
			if ret.__class__.__name__ not in ('list', 'tuple'):
				return None
			if len(ret) == 0:
				return None

			mongodb_host = ret[0]["Address"]
			mongodb_port = int(ret[0]["ServicePort"])
			config_arr = config_manager.get_config(consul_server, ["mongodb_name"])
		else:
			mongodb_host = "127.0.0.1"
			mongodb_port = 27017
			config_arr = []
			config_arr.append({"mongodb_name": "mydb"})
		try:
			client = MongoClient(mongodb_host, mongodb_port,
			                     serverSelectionTimeoutMS=1500)
			db_us = client[config_arr[0]["mongodb_name"]]
			collection = db_us[collection_name]
			return collection
		except Exception as ex:
			print(ex)
			return None
Example #6
def get_list_of_all_files():
    path_to_watch = get_config()['excel_path']

    files_dict = {}
    for files in os.listdir(path_to_watch):
        files_dict[files] = path_to_watch + r'\\' + files
    return files_dict
Example #7
    def __init__(self):
        self.log.debug("%s instantiating" % self.__class__.__name__)
        self.library = AudioLibrary(get_config()['BOBINA_PATH'])
        self.pool = Queue()
        self.library_index = 0

        self.prefetch(10)
Example #8
def _get_rabbit_connection_info(queue_key):
	"""
	Establishes a blocking RabbitMQ connection.
	TODO: Make this into a helper class or singleton
	Returns:
		error_msg, {queue_name, connection}
	"""

	if consul_server == "127.0.0.1":
		return "Consul server is set to 127.0.0.1", None
	look_for_service_name = "docker-rabbitmq-5672"
	found_service = config_manager.discover_service(consul_server, look_for_service_name)
	if found_service.__class__.__name__ not in ('list', 'tuple'):
		return "Service class not in expected format", None
	if len(found_service) == 0:
		return "No services found for `%s`" % look_for_service_name, None

	rabbitmq_host = found_service[0]["Address"]
	rabbitmq_port = int(found_service[0]["ServicePort"])

	config_arr = config_manager.get_config(consul_server, [queue_key, "rabbitmq_user", "rabbitmq_pass"])
	rabbit_username = config_arr[0]["rabbitmq_user"]
	rabbit_password = config_arr[0]["rabbitmq_pass"]

	try:
		credentials = pika.PlainCredentials(rabbit_username, rabbit_password)
		parameters = pika.ConnectionParameters(rabbitmq_host, rabbitmq_port, '/', credentials)
		connection = pika.BlockingConnection(parameters)
		return None, connection
	except Exception:
		return traceback.format_exc(), None
Example #9
    def schedule_next(self, ev, force=False):
        '''
        This method checks whether the event identified by the id must also
        ring in the future and, if so, sets up both timers (the one towards
        ourselves and the one towards the BellCacher).
        '''

        session = sessionmaker(bind=self.engine)()

        def get_timer(t):
            '''
            Returns a QTimer that fires at time t
            (NOT in t seconds)
            '''
            delta = t - datetime.now()
            timer = QtCore.QTimer()
            timer.setInterval(delta.total_seconds() * 1000)
            timer.setSingleShot(True)
            return timer

        ev_id = ev.id
        if force:
            ev = session.query(tamarradb.Event).filter_by(id=ev_id).first()
        if ev is None:  # the event no longer exists
            self.log.info("Event [%d] is no longer in the db" % ev_id)
            return
        now = datetime.now()
        event_time = ev.alarm.next_ring(
            now + timedelta(seconds=get_config()['CACHING_TIME']))
        self.log.debug("Suona alle %s, sono le %s (tra %d secondi)" %
                      (event_time, now, (event_time-now).total_seconds()))
        assert event_time > \
            (datetime.now() + timedelta(seconds=get_config()['CACHING_TIME']))

        # Create a cacher for the event's action and give it two timers: one to
        # re-schedule this method at ring time, one to start caching in advance
        cacher = BellCacher(ev.action, event_time)
        cacher.reschedule = get_timer(event_time)
        self.connect(cacher.reschedule, QtCore.SIGNAL('timeout()'),
                     functools.partial(self.schedule_next, ev, True))
        cacher.reschedule.start()
        # TODO: a function that checks whether everything still exists; we
        # have to define it ourselves to do dependency inversion
        cacher.checker = None
        cacher.ready.connect(lambda b: self.bell_ready.emit(b))
        cacher.timer = get_timer(
            event_time - timedelta(seconds=get_config()['CACHING_TIME']))
        cacher.timer.timeout.connect(cacher.run)
        cacher.timer.start()
        self.running_cachers.append(cacher)
Example #10
    def __init__(self):
        QtCore.QObject.__init__(self)
        self.log.debug("%s instantiating" % self.__class__.__name__)
        # TODO: self.connection = config.boh
        self.engine = create_engine(get_config()['DB'])
        tamarradb.Base.metadata.create_all(self.engine)
        self.visited = defaultdict(lambda: None)  # id-on-db: datetime
        self.running_cachers = []  # kept here so they don't expire
Example #11
def get_timefile_exact(time):
    '''
    time is of type `datetime`; it is not "rounded" to match the real file;
    that work is done in get_timefile(time)
    '''
    return os.path.join(
        get_config()['AUDIO_INPUT'],
        time.strftime('%Y-%m/%d/rec-%Y-%m-%d-%H-%M-%S-ror.mp3')
        )
Example #12
def get_param(known=None):
    args = argparse.ArgumentParser()

    args.add_argument('--name', type=str, required=True)

    args.add_argument('--gpus', type=str, default='-1')
    args.add_argument('--resume', action='store_true')
    args.add_argument('--abspath', type=str, default='/root/datasets')
    args.add_argument('--config_mode', type=str, default='')
    args.add_argument('--doa_loss',
                      type=str,
                      default='MSE',
                      choices=['MAE', 'MSE', 'MSLE', 'MMSE'])
    args.add_argument('--model',
                      type=str,
                      default='seldnet',
                      choices=[
                          'seldnet', 'seldnet_v1', 'seldnet_architecture',
                          'xception_gru'
                      ])
    args.add_argument('--model_config', type=str, default='')

    # training
    args.add_argument('--lr', type=float, default=0.001)
    args.add_argument('--decay', type=float, default=0.9)
    args.add_argument('--batch', type=int, default=256)
    args.add_argument('--epoch', type=int, default=1000)
    args.add_argument('--loss_weight', type=str, default='1,1000')
    args.add_argument('--patience', type=int, default=100)
    args.add_argument('--freq_mask_size', type=int, default=8)
    args.add_argument('--time_mask_size', type=int, default=24)
    args.add_argument('--loop_time',
                      type=int,
                      default=5,
                      help='times of train dataset iter for an epoch')

    # metric
    args.add_argument('--lad_doa_thresh', type=int, default=20)

    config = args.parse_known_args(known)[0]

    # model config
    if len(config.model_config) == 0:
        config.model_config = config.model
    model_config_name = config.model_config
    model_config = model_config_name + '.json'
    model_config = os.path.join('./model_config', model_config)

    if not os.path.exists(model_config):
        raise ValueError('Model config does not exist')
    model_config = argparse.Namespace(**json.load(open(model_config, 'rb')))

    config.name = f'{config.model}_{model_config_name}_{config.doa_loss}_{config.name}'
    config = get_config(config.name, config, mode=config.config_mode)

    return config, model_config
Example #13
    def __init__(self):
        QtCore.QObject.__init__(self)
        logger.debug("%s instantiating" % self.__class__.__name__)
        # TODO: self.libraries = \
        # {dir, AudioLibrary(dir) for dir in os.listdir(config.libraries)}
        self.events = set()
        self.path = get_config()['EVENTS_PATH']
        self.watch = QtCore.QFileSystemWatcher()
        for d in self.path:
            self.watch.addPath(d)
        self.watch.directoryChanged.connect(self.on_change)
Example #14
def run():
    parser = argparse.ArgumentParser(
        description='Generate Israeli politicians\' tweets')
    parser.add_argument('-f',
                        '--fetch',
                        action='store_true',
                        help='Fetch original tweets (default: False)')
    parser.add_argument('-p',
                        '--post',
                        action='store_true',
                        help='Post new tweets (default: False)')

    args = parser.parse_args()

    config = config_manager.get_config('general')
    original_tweets_dir = config['original_tweets_dir']
    new_tweets_file = config['new_tweets_file']

    if args.fetch:
        tweeter_handler = TwitterHandler()
        tweeter_handler.fetch_all(original_tweets_dir)

    twitter_users = config_manager.get_config('twitter_users')
    combined_model = None
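    # Generate tweets per user while accumulating a combined model across all users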
    with open(new_tweets_file, 'w', encoding='utf8') as output:
        for filename in listdir(original_tweets_dir):
            with open(path.join(original_tweets_dir, filename),
                      encoding='utf8') as f:
                original_tweets = f.read()
                new_tweets, model = tweet_maker.make_tweets_from_text(
                    original_tweets, 10)
                combined_model = tweet_maker.combine(combined_model, model)
                output_tweets(new_tweets, filename, twitter_users[filename],
                              output)
        new_combined_tweets = tweet_maker.make_tweets_from_model(
            combined_model, 20)
        output_tweets(new_combined_tweets, config['bot_screen_name'],
                      config['bot_name'], output)
Example #15
    def __init__(self):
        twitter_config = config_manager.get_config('twitter_config')

        auth = tweepy.OAuthHandler(twitter_config['consumer_key'],
                                   twitter_config['consumer_secret'])
        auth.set_access_token(twitter_config['access_token'],
                              twitter_config['access_token_secret'])

        self.twitter_api = tweepy.API(auth)

        self.url_pattern = re.compile(
            r'https?://t.co/\w+',
            re.RegexFlag.MULTILINE | re.RegexFlag.UNICODE)
        self.rt_pattern = re.compile(
            r'^RT', re.RegexFlag.MULTILINE | re.RegexFlag.UNICODE)
        self.twitter_config = twitter_config
Example #16
def get_data(file_name):
    folder_path = get_config()['excel_path']
    excel_path = folder_path + file_name
    json_file_name = create_json_filename_from_excel(file_name)
    json_path = folder_path + json_file_name
    # print(json_path)
    if check_if_json_record_exists(file_name):
        # print('it exists!')
        with open(json_path) as json_file:
            data = json.load(json_file)
            return data
    else:
        raw_data = get_initial_data_from_excel(excel_path)
        with open(json_path, 'w') as output_file:
            json.dump(raw_data, output_file)
            return raw_data
Example #17
    def _route(self):
        ### This is the API part of the app
        # TODO: move to namespace /api/
        # TODO: create a "sub-application"

        ## Static part of the site
        self._app.route('/output/<filepath:path>',
                        callback=lambda filepath:
                        static_file(filepath,
                                    root=get_config()['AUDIO_OUTPUT']))
        self._app.route('/static/<filepath:path>',
                        callback=lambda filepath: static_file(filepath,
                                                              root='static/'))
        self._app.route('/', callback=lambda: redirect('/new.html'))
        self._app.route('/new.html',
                        callback=partial(static_file, 'new.html',
                                         root='pages/'))
        self._app.route('/tempo.html',
                        callback=partial(static_file, 'tempo.html',
                                         root='pages/'))
Example #18
def update_record_json(file_name, year, exchangeID, answers, review_flag):
    try:
        folder_path = get_config()['excel_path']
        json_file_name = create_json_filename_from_excel(file_name)
        json_path = folder_path + json_file_name

        data = get_data(file_name)
        record = data[year]['records'][exchangeID]
        record['Processed'] = True
        record['Needs_Reviewing'] = review_flag
        record['answers'] = answers
        data[year]['records'][exchangeID] = record
        with open(json_path, 'w') as output_file:
            json.dump(data, output_file)

        # temp_dict = { 'file_name': file_name, 'year': year, 'exchangeID': exchangeID, 'answers': answers}
        # with open('temp4.json', 'w') as output_file:
        #      json.dump(temp_dict, output_file)
        return 'Success'
    except Exception:
        return 'Error'
Example #19
    def generate(self):
        # fetch the rec in question
        recid = dict(request.POST.allitems())['id']
        rec = self.db._search(_id=recid)[0]
        if rec.filename is not None and os.path.exists(rec.filename):
            return {'status': 'ready',
                    'message': 'The file has already been generated at %s' %
                    rec.filename,
                    'rec': rec
                    }
        rec.filename = 'ror-%s-%s.mp3' % \
                       (rec.starttime.strftime('%y%m%d_%H%M'),
                        ''.join(filter(lambda c: c.isalpha(), rec.name)))
        self.db.update(rec.id, rec.serialize())
        job_id = get_process_queue().submit(
            create_mp3,
            start=rec.starttime,
            end=rec.endtime,
            outfile=os.path.join(get_config()['AUDIO_OUTPUT'], rec.filename))
        print("SUBMITTED: %d" % job_id)
        return self.rec_msg("Update completed!",
                            job_id=job_id,
                            result='/output/' + rec.filename,
                            rec=rec_sanitize(rec))
Example #20
if __name__ == "__main__":
    configs = ['default_config.py']
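    # Collect extra config files from the colon-separated TECHREC_CONFIG environment variable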
    if 'TECHREC_CONFIG' in os.environ:
        for conf in os.environ['TECHREC_CONFIG'].split(':'):
            if not conf:
                continue
            path = os.path.realpath(conf)
            if not os.path.exists(path):
                logger.warn("Configuration file '%s' does not exist; skipping"
                            % path)
                continue
            configs.append(path)
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    for conf in configs:
        get_config().from_pyfile(conf)
    c = RecServer()
    c._app.mount('/date', DateApp())
    c._app.mount('/api', RecAPI())
    c._app.run(host=get_config()['HOST'], port=get_config()['PORT'],
               debug=get_config()['DEBUG'])
Example #21
    def pre_quit(self):
        self.log.info("About to quit, cleaning up...")
        for name in os.listdir(get_config()['CACHE_DIR']):
            if name.endswith('.wav'):
                os.unlink(os.path.join(get_config()['CACHE_DIR'], name))
Example #22
    def fetch_all(self, output_dir):
        twitter_users = config_manager.get_config('twitter_users')

        for twitter_user in twitter_users:
            self.fetch(twitter_user, output_dir)
Example #23
def generate_excel_CMA(file_name):
    # Definitions
    UCDPConflictID = 'UCDPConflictID'
    UCDPCountryID = 'UCDPCountryID'
    COWStateAbb = 'COWStateAbb'
    COWCCode = 'COWCCode'
    SideA = 'SideA'
    SideA_ID = 'SideA ID'
    SideB = 'SideB'
    SideB_ID = 'SideB ID'
    YEAR = 'YEAR'
    Region = 'Region'
    ExchangeID = 'ExchangeID'
    CMA = 'CMA'
    ExTCOWCode = 'ExTCOWCode'
    Client = 'Client'
    ClientCode = 'ClientCode'
    DClient = 'DClient'
    ForThird = 'ForThird'
    Consumer = 'Consumer'
    ConsumerID = 'ConsumerID'
    CoOrigin = 'CoOrigin'
    CoOriginCode = 'CoOriginCode'
    AgentId = 'AgentId'
    OpOrigin = 'OpOrigin'
    OpOriginCode = 'OpOriginCode'
    Task = 'Task'
    AgentSt = 'AgentSt'
    OwnSt = 'OwnSt'
    SizeBest = 'SizeBest'
    SizeMin = 'SizeMin'
    SizeMax = 'SIzeMax'
    Rly = 'Rly'

    headers = [
        UCDPConflictID, UCDPCountryID, COWStateAbb, COWCCode, SideA, SideA_ID,
        SideB, SideB_ID, YEAR, Region, ExchangeID, CMA, ExTCOWCode, Client,
        ClientCode, DClient, ForThird, Consumer, ConsumerID, CoOrigin,
        CoOriginCode, AgentId, OpOrigin, OpOriginCode, Task, AgentSt, OwnSt,
        SizeBest, SizeMin, SizeMax, Rly
    ]

    textBoxHeaders = {
        ClientCode: ClientCode,
        'ConsumerCode': ConsumerID,
        CoOriginCode: CoOriginCode,
        OpOriginCode: OpOriginCode,
        'AgentIdCode': AgentId,
    }

    headerPosition = {}  # maps each header name to its column index

    #Open up json file
    # json_path = r'C:\Users\tom_b\Desktop\Documents\Python\vcoding\Niger_New.json'
    # json_path = r'C:\Users\tom_b\Desktop\Documents\Python\vcoding\jsonfix\Chad_Master-File.json'
    folder_path = get_config()['excel_path']
    print(folder_path)
    destination_path = get_config()['destination_path']
    print(destination_path)

    #### IMPLEMENT A COMPLETED PATH IN CONFIG AND SETTINGS PAGES

    json_file_name = create_json_filename_from_excel(file_name)
    print(json_file_name)
    json_path = folder_path + json_file_name
    print(json_path)
    with open(json_path, 'r') as data_json_file:
        data = json.load(data_json_file)

    excel_completed_filename = 'CMA_TABLE_' + file_name
    excel_completed_filepath = destination_path + excel_completed_filename
    # Create a new Excel file and add a worksheet.
    # workbook = xlsxwriter.Workbook('test.xlsx')
    workbook = xlsxwriter.Workbook(excel_completed_filepath)
    worksheet = workbook.add_worksheet()

    # Add a bold format to use to highlight cells.
    bold = workbook.add_format({'bold': True})

    needs_review_format = workbook.add_format()
    needs_review_format.set_font_color('red')
    needs_review_format.set_bold()
    needs_review_format.set_font_size(30)

    for index, header in enumerate(headers):
        worksheet.write(0, index, header, bold)
        worksheet.write(0, index + 1, 'REVIEW_FLAG', bold)
        headerPosition['REVIEW_FLAG'] = index + 1
        headerPosition[header] = index
        # worksheet.write(1, index, 'test!')
        # print(str(index) + ': ' + header)

    # print(headerPosition)

    countryCode = data['country']
    countryCodeId = get_country_code_id(countryCode)

    current_row = 1

    for index, yearIndex in enumerate(data.keys()):
        # print(index)
        # print(yearIndex)
        if type(data[yearIndex]) == dict:
            for record_index, record_id in enumerate(
                    data[yearIndex]['records']):
                processed_bool = data[yearIndex]['records'][record_id][
                    'Processed']
                review_bool = data[yearIndex]['records'][record_id][
                    'Needs_Reviewing']
                if processed_bool:

                    worksheet.write(current_row, headerPosition[YEAR],
                                    yearIndex)
                    worksheet.write(current_row, headerPosition[ExchangeID],
                                    record_id)
                    worksheet.write(current_row, headerPosition[COWStateAbb],
                                    countryCode)
                    worksheet.write(current_row, headerPosition[COWCCode],
                                    countryCodeId)
                    size_best_arr = data[yearIndex]['records'][record_id][
                        SizeBest]['Information']
                    size_min_arr = data[yearIndex]['records'][record_id][
                        SizeMin]['Information']
                    size_max_arr = data[yearIndex]['records'][record_id][
                        SizeMax]['Information']
                    size_best = ''
                    size_min = ''
                    size_max = ''

                    for index, line in enumerate(size_best_arr):
                        print(index)
                        if index > 1:
                            separator = ' // '
                        else:
                            separator = ''

                        size_best = size_best + str(line) + separator

                    for index, line in enumerate(size_min_arr):
                        if index > 1:
                            separator = ' // '
                        else:
                            separator = ''
                        size_min = size_min + str(line) + separator

                    for index, line in enumerate(size_max_arr):
                        if index > 1:
                            separator = ' // '
                        else:
                            separator = ''
                        size_max = size_max + str(line) + separator

                    # if float - convert to string
                    # if array join

                    worksheet.write(current_row, headerPosition[SizeBest],
                                    size_best)
                    worksheet.write(current_row, headerPosition[SizeMin],
                                    size_min)
                    worksheet.write(current_row, headerPosition[SizeMax],
                                    size_max)
                    # print(review_bool)
                    if review_bool:
                        position = headerPosition['REVIEW_FLAG']
                        worksheet.write(current_row, position, '1',
                                        needs_review_format)
                        current_row = current_row + 1

                    else:
                        print(record_id)
                        for dataId in data[yearIndex]['records'][record_id][
                                'answers']:
                            key = ''
                            value = ''
                            position = 50
                            # print(headerPosition[dataId])
                            if dataId in textBoxHeaders:
                                # print(dataId)
                                key = textBoxHeaders[dataId]
                                value = data[yearIndex]['records'][record_id][
                                    'answers'][dataId][0]
                                position = headerPosition[key]
                            elif dataId == 'AgentId':
                                print("")
                            else:
                                key = dataId
                                value = data[yearIndex]['records'][record_id][
                                    'answers'][dataId][1]
                                position = headerPosition[key]
                            worksheet.write(current_row, position, value)
                        current_row = current_row + 1

                        # if dataId == 'ConsumerCode':
                        #     print(dataId)
                        # worksheet.write(index + 1, headerPosition[dataId],data[yearIndex]['records'][record_id]['answers'][dataId][1] )

    workbook.close()


# excel_files = get_files()
# for file_name in excel_files.keys():
#     generate_excel_CMA(file_name)
Example #24
import pusher
from log_writer import *
from dbadapter import get_logged_in_user_organization
import config_manager
from helper import get_consul_server

logger = get_logger("pusher-util")

consul_server = get_consul_server()
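# Fetch the Pusher credentials from the Consul-backed configuration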
config_arr = config_manager.get_config(consul_server, ["pusher_key", "pusher_app_id", "pusher_secret"])

pusher_client = pusher.Pusher(
  app_id=config_arr[0]["pusher_app_id"],
  key=config_arr[0]["pusher_key"],
  secret=config_arr[0]["pusher_secret"],
  ssl=False
)

def send_pusher_message(event, data, organization_id=None):
	try:
		if organization_id is not None:
			channel = "cedarwood_" + organization_id
		else:
			channel = "cedarwood_" + get_logged_in_user_organization()["EntityID"]

		pusher_client.trigger(channel, event, data)
		debug(logger, "on channel:" + channel + " event:" + event + " sent...")
	except Exception as e:
		error(logger, e)

def send_pusher_doc_message(event, data):
Example #25
    def get(self):
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(
            repr(config_manager.get_config('OPERATIONAL Job Fetch Bots')))
Example #26
    def __del__(self):
        if get_config()['CLEAN_CACHE']:
            os.unlink(self)
Example #27
    app.connect(app, QtCore.SIGNAL('really_run()'), _c.start)


def parse_options(args):
    # TODO: move to argparse
    opts = {'config': []}
    if len(args) > 1:
        opts['config'].append(args[1])
    return opts


if __name__ == '__main__':
    import sys
    #logging.basicConfig(filename=None, level=logging.DEBUG)
    opts = parse_options(sys.argv)
    log.ColoredLogger.default_level = logging.DEBUG
    logger.info('start')
    app = Tamarradio(sys.argv)
    app.connect(app, QtCore.SIGNAL('start_app()'), main)
    get_config().from_pyfile("default_config.py")
    get_config().from_pyfile("/etc/tamarradio/player.cfg", silent=True)
    for configfile in opts['config']:
        get_config().from_pyfile(configfile)
    for d in get_config()['LIBRARIES_PATH']:
        get_libraries().update(find_libraries(d))

    signal.signal(signal.SIGINT, lambda *args: app.quit())
    ret = app.exec_()
    logger.info('end %d' % ret)
    sys.exit(ret)
Example #28
    app.connect(app, QtCore.SIGNAL('really_run()'), _c.start)


def parse_options(args):
    # TODO: move to argparse
    opts = {'config': []}
    if len(args) > 1:
        opts['config'].append(args[1])
    return opts


if __name__ == '__main__':
    import sys
    #logging.basicConfig(filename=None, level=logging.DEBUG)
    opts = parse_options(sys.argv)
    log.ColoredLogger.default_level = logging.DEBUG
    logger.info('start')
    app = Tamarradio(sys.argv)
    app.connect(app, QtCore.SIGNAL('start_app()'), main)
    get_config().from_pyfile("default_config.py")
    get_config().from_pyfile("/etc/tamarradio/player.cfg", silent=True)
    for configfile in opts['config']:
        get_config().from_pyfile(configfile)
    for d in get_config()['LIBRARIES_PATH']:
        get_libraries().update(find_libraries(d))

    signal.signal(signal.SIGINT, lambda *args: app.quit())
    ret = app.exec_()
    logger.info('end %d' % ret)
    sys.exit(ret)
Example #29
def get_settings():
    config = get_config()
    return jsonify(config)
Example #30
    def get(self):
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(repr(
            config_manager.get_config('OPERATIONAL Job Fetch Bots')))
Example #31
    def __init__(self):
        Bottle.__init__(self)
        self._route()
        self.db = RecDB(get_config()['DB_URI'])
Example #32
    def pre_quit(self):
        self.log.info("About to quit, cleaning up...")
        for name in os.listdir(get_config()['CACHE_DIR']):
            if name.endswith('.wav'):
                os.unlink(os.path.join(get_config()['CACHE_DIR'], name))