Example no. 1
def delete_event(body):
    event_id = body.get('eventId')

    session = db.create_session()
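    # Collect the ids of every taco order that belongs to this event.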
    orders_querystr = '''select taco.id
                        from Orders o
                        join Taco_Order taco on taco.order_id=o.id
                        join Taco_Ingredient ti on taco.id=ti.order_id
                        join Ingredients ing on ing.id=ti.ingredient_id
                        where o.event_id=:event_id group by taco.id'''
    query_result = session.execute(orders_querystr, {"event_id": event_id})

    orders = []
    for result in query_result:
        orders.append(result[0])

    session.commit()
    session.flush()
    session.close()

    # delete each taco order and its ingredients
    for order in orders:
        removeTaco({'taco_order_id': order})

    # delete the event
    session = db.create_session()
    event = session.query(Event).filter(Event.id == event_id)
    event.delete()
    session.commit()
    session.flush()
    session.close()
Example no. 2
def gen_session_code():
    while True:
        session_code = "%s %s" % (random.randint(100, 1000), random.randint(100, 1000))
        if not db.get_session(session_code):
            break
    db.create_session(session_code)
    return jsonify({"game_code": session_code})
Example no. 3
def create_app(config_name):
    app = Flask(__name__)
    ###
    app.config.from_object(config[config_name])
    ###
    flask_backend_session = create_session(
        app.config['SQLALCHEMY_BINDS']['flask_backend'])
    hacker_news_session = create_session(
        app.config['SQLALCHEMY_BINDS']['hacker_news'])

    @app.before_request
    def pass_session():
        g.flask_backend_session = flask_backend_session
        g.hacker_news_session = hacker_news_session

    with app.app_context():
        from api.bp import api_bp
        app.register_blueprint(api_bp)

    @app.teardown_appcontext
    def shutdown_session(exception=None):
        if g.flask_backend_session:
            g.flask_backend_session.remove()
        if g.hacker_news_session:
            g.hacker_news_session.remove()

    return app
Example no. 4
def check_uploaded_acts(acts_ids):
    in_session = create_session(DATABASES['main'].metadata.bind)
    out_session = create_session(DATABASES['outside'].metadata.bind)
    uploaded_acts = out_session.query(UploadLog).filter(UploadLog.external_id.in_(acts_ids)).all()
    uploaded_acts_ids = [uploaded_act.external_id for uploaded_act in uploaded_acts]
    set_upload_for_acts(in_session, uploaded_acts_ids)

    return len(set(uploaded_acts_ids)) == len(set(acts_ids))
Example no. 5
    def __init__(self, in_engine, out_engine):
        if in_engine and out_engine:
            self.out_session = create_session(
                DATABASES['outside'].metadata.bind)
            self.in_session = create_session(DATABASES['main'].metadata.bind)

            self._clear_objects_buffer()
        else:
            raise AttributeError
Example no. 6
def upload_csv():
    import time
    if request.method == 'POST':
        # check if the post request has the file part
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # if user does not select file, browser also
        # submit an empty part without filename
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename, ALLOWED_EXTENSIONS):
            filename = secure_filename(file.filename)
            ext = filename.split('.')[-1]
            f = file.stream.read().decode("utf-8")
            rows = [row.split(",") for row in f.split("\n")]
            db_session = create_session(engine)
            Catalog.from_csv_list(rows).save_to_db(db_session)
            file.save(
                os.path.join(app.config['UPLOAD_FOLDER'],
                             time.strftime(f"%Y%m%d-%H%M%S.{ext}")))
            flash('Les données ont bien été importées')

    return render_template('upload.html')
Example no. 7
def removeTaco(body):
    taco_id = body.get('taco_order_id')
    session = db.create_session()
    taco_order = session.query(Taco_Order).filter(Taco_Order.id == taco_id)
    order_id = taco_order.first().as_dict()["order_id"]
    taco_ingredients = session.query(Taco_Ingredient).filter(
        Taco_Ingredient.order_id == taco_id)

    taco_order.delete()
    taco_ingredients.delete()

    remaining_taco_orders = session.query(func.count(Taco_Order.id))\
        .filter(Taco_Order.id != taco_id)\
        .filter(Taco_Order.order_id == order_id)\
        .scalar()

    if remaining_taco_orders == 0:
        order = session.query(Order.id).filter(Order.id == order_id)
        order.delete()

    session.commit()
    session.flush()
    session.close()

    if remaining_taco_orders > 0:
        calculate_order_cost(order_id)

    return
Example no. 8
def get_music_spectrum_centroid(music_id):
    session = create_session()
    user_id = g.current_user['sub']
    music = session.query(Music).filter_by(user_id=user_id,
                                           id=music_id).first()
    session.close()
    return jsonify({'values': spectrum_centroid(music)})
Example no. 9
def events(event_id: hug.types.number = 0):
    events = []

    session = db.create_session()
    query = session.query(Event, User, Location)\
        .join(User, Event.user_id == User.email)\
        .join(Location, Event.location_id == Location.id)

    if event_id:
        query = query.filter(Event.id == event_id)

    query_result = query.all()

    if not query_result:
        return []

    for result in query_result:
        d = {}
        event = result[0]
        user = result[1]
        location = result[2]

        d['event'] = event.as_dict()
        d['user'] = user.as_dict()
        d['location'] = location.as_dict()

        events.append(d)

    return events
Example no. 10
def get_student_music_f0(user_id, music_id):
    session = create_session()
    """
    user = session.query(User).filter_by(
        name=user_name).first()
    """
    user_id = user_id.replace("%", "|")
    music = session.query(Music).filter_by(user_id=user_id,
                                           id=music_id).first()

    f0 = music.fundamental_frequency(session)

    average = frequency_ave_data(music)
    times = librosa.times_like(f0, sr=48000)

    start, end = find_start_end(music)
    data = []
    j = 0
    if end < len(f0) - 2:
        end += 1
    for i in range(max(0, start), end):
        if f0[i] >= 0:
            dic = {"x": round(times[i], 2), "y": round(f0[i], 4)}
        else:
            dic = {"x": round(times[i], 2), "y": 0}
        j += 1
        data.append(dic)

    session.close()
    return jsonify({'average': average[1], 's': average[0], 'values': data})
Example no. 11
def put_my_list(id):
    session = create_session()
    user_id = g.current_user['sub']

    data = session.query(TvLIst).filter_by(id=id).first()
    data = data.to_json()

    user_t = session.query(UserTvLIst).filter_by(user_id=user_id).all()
    for i in range(len(user_t)):  # do not add if it is already in the list
        if user_t[i].date == data["date"] and user_t[i].channel == data[
                'channel']:
            return "resive"

    print(data)
    toMyList = UserTvLIst(user_id=user_id,
                          channel=data['channel'],
                          date=data['date'],
                          name=data['name'],
                          artist=data['artist'],
                          start_time=data['start_time'],
                          end_time=data['end_time'],
                          comment=data['comment'],
                          check=0)

    session.add(toMyList)
    session.commit()
    session.close()

    return "resive"
Example no. 12
def import_species_csv(csv_file, col_index=0, delimiter=',', quote_char='"'):
    session = create_session()
    species_name_list = []
    with open(csv_file, 'rb') as csv_file:
        data_list = csv.reader(csv_file, delimiter=delimiter, quotechar=quote_char)
        for row in data_list:
            species_name = row[col_index]
            species_name_list.append(species_name)

    import collections

    duplicate_list = [x for x, y in collections.Counter(species_name_list).items() if y > 1]
    unique_list = list(set(species_name_list))

    for species_name in unique_list:
        species_exists = session.query(Species).filter(Species.species_name == species_name).count()
        if not species_exists:
            species_obj = Species(species_name=species_name)
            session.add(species_obj)
        else:
            # NOTICE: species that already exist are treated as duplicates
            duplicate_list.append(species_name)
    session.commit()

    return unique_list, duplicate_list
Example no. 13
def get_student_folders_parallel(user_id, folder_id):
    session = create_session()
    musics = session.query(Music).filter_by(user_id=user_id.replace("%", "|"),
                                            folder_id=folder_id).order_by(
                                                Music.id).all()

    Datas = []
    for music in musics:
        Datas.append([
            music.id,
            frequency_ave_data(music),
            spectrum_rolloff_ave(music),
            decibel_ave_data(music)
        ])

    for i in range(len(Datas)):
        Datas[i].append(Datas[i][1][0] + Datas[i][3][0] + Datas[i][2][0])

    dicDatas = []
    j = 1
    for i in range(len(Datas) - 1, -1, -1):
        if j > 10:
            break
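        # Dict keys are Japanese UI labels: 高さ = pitch/height, 音色 = timbre, 強さ = strength.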
        dic = {
            "No.": j,
            "高さ": Datas[i][1][0],
            "音色": Datas[i][2][0],
            "強さ": Datas[i][3][0],
        }
        j += 1
        dicDatas.append(dic)
    dicDatas = sorted(dicDatas, key=lambda x: x["No."], reverse=True)
    session.close()
    return jsonify(dicDatas)
Example no. 14
def get_all_list():
    session = create_session()
    user_id = g.current_user['sub']
    data = session.query(TvLIst).all()
    data = [d.to_json() for d in data if d.creater != user_id]
    session.close()
    return jsonify(data)
Example no. 15
def delete_music(music_id):
    session = create_session()
    user_id = g.current_user['sub']
    session.query(Music).filter_by(id=music_id, user_id=user_id).delete()
    session.commit()
    session.close()
    return jsonify({'message': 'deleted'})
Example no. 16
def get_folder_progress(folder_id):
    session = create_session()
    user_id = g.current_user['sub']
    musics = session.query(Music).filter_by(user_id=user_id,
                                            folder_id=folder_id).order_by(
                                                Music.id).all()

    Datas = []
    for music in musics:
        Datas.append([
            music.id,
            frequency_ave_data(music),
            spectrum_rolloff_ave(music),
            decibel_ave_data(music)
        ])

    for i in range(len(Datas)):
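        # Overall score: sum of the frequency, decibel and spectrum-rolloff averages.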
        Datas[i].append(Datas[i][1][0] + Datas[i][3][0] + Datas[i][2][0])

    dicDatas = []
    j = 1
    for i in range(len(Datas) - 1, -1, -1):
        dic = {
            # "x": Datas[i][0],
            "x": j,
            "y": round(Datas[i][4], 4)
        }
        dicDatas.append(dic)
        j += 1
    dicDatas = sorted(dicDatas, key=lambda x: x["x"], reverse=True)

    session.close()
    return jsonify(dicDatas)
Example no. 17
def delete_comment(comment_id):
    session = create_session()
    user_id = g.current_user['sub']
    session.query(Comment).filter_by(id=comment_id, user_id=user_id).delete()
    session.commit()
    session.close()
    return {"message": "deleted"}
Example no. 18
def get_student_folder(user_id):
    session = create_session()
    folders = session.query(Folder).filter_by(
        user_id=user_id.replace("%", "|")).all()
    folders = [f.to_json() for f in folders]
    session.close()
    return jsonify(folders)
Example no. 19
def put_music_content(music_id):
    session = create_session()
    user_id = g.current_user['sub']
    data = json.loads(request.data.decode('utf-8'))
    music = session.query(Music).filter_by(user_id=user_id,
                                           id=music_id).first()

    if 'name' in data:
        music.name = data['name']
    if 'folderId' in data:
        music.folder_id = data['folderId']

    if 'assessment' in data:
        music.assessment = data['assessment']
    if 'comment' in data:
        comment = Comment(music_id=music_id,
                          text=data['comment'],
                          user_id=user_id)
        session.add(comment)

    # Overwrite the name with the creation timestamp converted to JST.
    jst_time = music.created + datetime.timedelta(hours=9)
    music.name = jst_time.strftime('%Y/%m/%d  %H:%M:%S')
    session.add(music)
    session.commit()
    session.close()
    return {"message": "updated"}
Example no. 20
def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--log_level',
        default='info',
        choices=['debug', 'info', 'warning', 'error', 'critical'],
        help='Verbosity of logging')
    parser.add_argument(
        '--watch',
        action='store_true',
        help=
        'Automatically restart the server if source code or templates are changed'
    )
    args = parser.parse_args()

    log_level = getattr(logging, args.log_level.upper())
    logging.basicConfig(level=log_level)

    global session  # pylint: disable=global-statement
    global recording_overrides  # pylint: disable=global-statement
    session = db.create_session(
        os.path.join(os.path.dirname(__file__), 'master.db'))
    recording_overrides = RecordingOverrides()

    host = 'localhost'
    port = 8080
    logging.info(f'Launching web server on http://{host}:{port}/')
    # Database session is not thread safe, so we need to disable threading here.
    app.run(host=host, port=port, debug=args.watch, threaded=False)
Example no. 21
def collect_record_data(root_dir):
    session = create_session()
    species_name_list = session.query(Species).filter(Species.in_process == False, Species.name_correct == True).all()

    for species_obj in species_name_list:
        species_name = species_obj.species_name
        species_key = species_obj.species_key

        obj = OccurrenceSearch()
        data, no_data = obj.search(taxon_key=species_key)
        species_obj = session.query(Species).filter(Species.species_name == species_name).one()
        species_obj.in_process = True
        session.commit()

        if len(no_data):
            species_obj.have_un_coordinate_data = True
            session.commit()
            # species_obj.un_coordinate_data = json.dumps(no_data)
            species_obj.un_coordinate_data = json.dumps(no_data)
            session.commit()

        if len(data):
            for item in data:
                longitude = item[0]
                latitude = item[1]
                country_code = item[2]
                occurrence_obj = Occurrence(
                    species_name=species_name, longitude=longitude, latitude=latitude, country_code=country_code
                )
                session.add(occurrence_obj)
                session.commit()
        else:
            species_obj.no_data = True
            session.commit()
    return None
Example no. 22
def get_user_list():
    session = create_session()
    user_id = g.current_user['sub']
    # get user list
    user_t = session.query(User).all()
    user_list = list(user_t[i].id for i in range(len(user_t)))
    # print(user_list)
    registered = False
    # check if already registered
    for _id in user_list:
        if _id == user_id:
            registered = True
            break
    # register
    if registered == False:
        u = User(id=user_id, notification_allow=1)
        session.add(u)
        session.commit()
        user_noti = UserNotification(user_id=user_id, time="pre/20:00")
        session.add(user_noti)
        session.commit()

    data = session.query(UserTvLIst).filter_by(user_id=user_id).all()
    data = [d.to_json() for d in data]
    # print(data)
    #data = OrderedDict(data)
    # print(data)
    session.close()
    return jsonify(data)
Example no. 23
def upgrade_act_dates():
    session = create_session(DATABASES['main'].metadata.bind)
    for act in session.query(Act).filter(Act.act_date == None):
        print(act)
        date = datetime.strptime(act.date, '%d.%m.%y').date()
        act.act_date = date
    session.commit()
Example no. 24
def delete_user_program_list(id):
    session = create_session()
    user_id = g.current_user['sub']
    session.query(UserTvLIst).filter_by(user_id=user_id, id=id).delete()
    session.commit()
    session.close()
    return get_user_list()
Example no. 25
def change_storekeeper():
    session = create_session(DATABASES['main'].metadata.bind)
    for act in session.query(Act):
        if act.storekeeper == 'Скубиро А.Л.':
            act.storekeeper = 'Донец Д.Н.'
    session.commit()
    print('Change storekeeper complete!!!')
Example no. 26
def calculate_order_cost(order_id):
    session = db.create_session()

    order = session.query(Order).get(order_id)
    order_data = order.as_dict()

    location_ingredients = ingredients(event_id=order_data["event_id"])
    event_data = session.query(Event).get(order_data["event_id"]).as_dict()
    location_data = session.query(Location).get(
        event_data["location_id"]).as_dict()

    order_cost = 0

    taco_orders = session.query(Taco_Order).filter(
        Taco_Order.order_id == order_id)
    for taco_order in taco_orders:
        order_cost = float(order_cost) + \
            float(location_data["base_taco_price"])

        taco_ings = session.query(Taco_Ingredient).filter(
            Taco_Ingredient.order_id == taco_order.id)
        for taco_ing in taco_ings:
            for ing in location_ingredients:
                if taco_ing.ingredient_id == ing["id"]:
                    order_cost = float(order_cost) + float(ing["price"])

    order.order_amount = order_cost
    session.add(order)
    session.commit()
    session.flush()
    session.close()
Example no. 27
def location(body):
    try:
        session = db.create_session()
        param_id = body.get('id', None)
        existing_location = session.query(Location).get(
            param_id) if param_id else Location()

        # Merge any existing object with the passed in object
        location = session.merge(existing_location)
        location.id = body.get('id', existing_location.id)
        location.name = body.get('name', existing_location.name)
        location.street_address = body.get('street_address',
                                           existing_location.street_address)
        location.city = body.get('city', existing_location.city)
        location.state = body.get('state', existing_location.state)
        location.zip = body.get('zip', existing_location.zip)
        location.phone_number = body.get('phone_number',
                                         existing_location.phone_number)
        location.hours = body.get('hours', existing_location.hours)
        location.base_taco_price = body.get('base_taco_price',
                                            existing_location.base_taco_price)

        session.commit()  # To prevent lock on the table
        session.add(location)  # Add the new object to the session
        session.flush()  # Commits and flushes
        session.close()
        return 'SUCCESS: Updated the TACOBASE'
    except Exception as Error:
        print(Error)
        raise Error
Example no. 28
def cross_check(work_dir="."):
    session = create_session()

    occurrence_list = session.query(Occurrence).filter(Occurrence.cross_check == None).all()
    session.close()

    queue = multiprocessing.Queue()
    for item in occurrence_list:
        item_data = [item.id, item.longitude, item.latitude, item.country_code]
        queue.put(item_data)

    cpu_core_count = multiprocessing.cpu_count()
    multiprocess_core = cpu_core_count - 1

    process_pool = []
    for index in range(multiprocess_core):
        process_obj = multiprocessing.Process(target=worker, args=(queue, work_dir))
        process_obj.daemon = True
        process_obj.start()
        process_pool.append(process_obj)

    for process_obj in process_pool:
        process_obj.join()

    return None
Example no. 29
def order(body):
    try:
        session = db.create_session()
        param_id = body.get('id', None)
        existing_order = session.query(Order).get(
            param_id) if param_id else Order()

        # Merge any existing object with the passed in object
        order = session.merge(existing_order)
        order.id = body.get('id', existing_order.id)
        order.user_id = body.get('user_id', existing_order.user_id)
        order.event_id = body.get('event_id', existing_order.event_id)
        order.payment_amount = body.get('payment_amount',
                                        existing_order.payment_amount)
        order.order_amount = body.get('order_amount',
                                      existing_order.order_amount)

        session.commit()  # To prevent lock on the table
        session.add(order)  # Add the new object to the session
        session.flush()  # Commits and flushes
        session.close()
        return 'SUCCESS: Updated the TACOBASE'
    except Exception as Error:
        print(Error)
        raise Error
Example no. 30
def event_orders(event_id: hug.types.number, user_id: hug.types.text = ''):
    orders_list = []

    session = db.create_session()

    orders = session.query(Order).filter(Order.event_id == event_id)
    if user_id:
        orders = orders.filter(Order.user_id == user_id)

    for order in orders:
        enriched_order = order.as_dict()

        enriched_order["taco_orders"] = []

        querystr = '''select
                                taco.id,
                                taco.order_id,
                                group_concat(ing.name, ', ')
                    from Taco_Order taco
                    join Taco_Ingredient ti on taco.id=ti.order_id
                    join Ingredients ing on ing.id=ti.ingredient_id
                    where taco.order_id=:taco_order_id group by taco.id'''

        query_result = session.execute(querystr,
                                       {"taco_order_id": enriched_order["id"]})
        for result in query_result:
            enriched_order["taco_orders"].append({
                "taco_id": result[0],
                "order_id": result[1],
                "ingredient_desc": result[2]
            })

        orders_list.append(enriched_order)

    return orders_list
Example no. 31
def ingredient(body):
    try:
        session = db.create_session()
        param_id = body.get('id', None)
        existing_ingredient = session.query(Ingredient).get(
            param_id) if param_id else Ingredient()

        # Merge any existing object with the passed in object
        ingredient = session.merge(existing_ingredient)
        ingredient.id = body.get('id', existing_ingredient.id)
        ingredient.name = body.get('name', existing_ingredient.name)
        ingredient.description = body.get('description',
                                          existing_ingredient.description)
        ingredient.price = body.get('price', existing_ingredient.price)
        ingredient.location_id = body.get('location_id',
                                          existing_ingredient.location_id)

        session.commit()  # To prevent lock on the table
        session.add(ingredient)  # Add the new object to the session
        session.flush()  # Commits and flushes
        session.close()
        return 'SUCCESS: Updated the TACOBASE'
    except Exception as Error:
        print(Error)
        raise Error
Example no. 32
def create_location(body):
    try:
        session = db.create_session()
        location = Location()

        location.name = body.get('name') or ''
        location.street_address = body.get('street_address') or ''
        location.city = body.get('city') or ''
        location.state = body.get('state') or ''
        location.zip = body.get('zip') or ''
        location.phone_number = body.get('phone_number') or ''
        location.hours = body.get('hours') or ''
        location.base_taco_price = body.get('base_taco_price') or 0

        session.commit()  # To prevent lock on the table
        session.add(location)  # Add the new object to the session
        session.flush()  # Commits and flushes

        ingredients = body.get('ingredient_list') or []
        add_location_ingredients(session, location.id, ingredients)

        update_location = session.merge(location)
        session.add(update_location)  # Add the new object to the session
        session.commit()
        session.flush()  # Commits and flushes
        session.close()
        return {"success": True, "message": "Location created"}

    except Exception as Error:
        print(Error)
        raise Error
Example no. 33
def create_session():
    session_id = db.create_session()
    cookies_object = Cookie.SimpleCookie()
    cookies_object["session_id"] = session_id
    cookies_object["images_visited"] = '{}'
    print(cookies_object.output())  # writing the cookie to the header
    return session_id
Example no. 34
def worker(queue, work_dir):
    # register signal process function
    signal.signal(signal.SIGTERM, signal_handler)

    session = create_session()
    feature_dict = {}

    base_dir = os.path.join(work_dir, 'gadm')
    dir_name_list = [o for o in os.listdir(base_dir) if os.path.isdir(os.path.join(base_dir, o))]

    for dir_name in dir_name_list:
        feature_name = dir_name.split("_")[0]
        shape_file = os.path.join(base_dir, dir_name + "/" + dir_name + "0.shp")
        driver = ogr.GetDriverByName("ESRI Shapefile")
        data_source = driver.Open(shape_file, 0)
        layer = data_source.GetLayer()

        feature = layer[0]
        feature_dict[feature_name] = feature

    while True:
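        # Each queued occurrence is tested against its country polygon; cross_check is set to
        # 1 (inside), 0 (outside), -1 (no shapefile for the country), -2 (unknown country code),
        # -3 (missing country code).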
        try:
            oid, longitude, latitude, country_code = queue.get(True, 1000)
        except Queue.Empty:
            break
        point = ogr.Geometry(ogr.wkbPoint)
        point.AddPoint(longitude, latitude)

        if country_code is None:
            session.query(Occurrence).filter(Occurrence.id == oid).update({"cross_check": -3},
                                                                          synchronize_session=False)
            session.commit()
            continue
        elif country_code in countries:
            country_data_alpha_3 = countries.get(country_code)[2]
            if country_data_alpha_3 in feature_dict.keys():
                poly = feature_dict[country_data_alpha_3]
                poly = poly.geometry()
            else:
                session.query(Occurrence).filter(Occurrence.id == oid).update({"cross_check": -1},
                                                                              synchronize_session=False)
                session.commit()
                return None
        else:
            session.query(Occurrence).filter(Occurrence.id == oid).update({"cross_check": -2},
                                                                          synchronize_session=False)
            session.commit()
            return None

        intersection = poly.Intersects(point)

        if intersection:
            session.query(Occurrence).filter(Occurrence.id == oid).update({"cross_check": 1}, synchronize_session=False)
            session.commit()
        else:
            session.query(Occurrence).filter(Occurrence.id == oid).update({"cross_check": 0}, synchronize_session=False)
            session.commit()

    return None
Example no. 35
def handle_starts(args):
    conn, path = instantiate_db(True)

    games = db.search_game(conn, args.title)[:5]

    print('Which game?')
    print(rp.format_records(games,
        ['title'],
        header = True, nums = True))

    which_game = input('Input number, or q to abort: ')
    if which_game.lower() == 'q':
        return

    gid, title = (games[int(which_game) - 1]['id'],
                  games[int(which_game) - 1]['title'])
    db.create_session(conn, gid)

    print('Created session for %s' % (title))

    db.dump_csvs(conn, path)
Example no. 36
def check_species_data():
    db_session = create_session()
    species_name_list = db_session.query(Species.species_name).filter(Species.name_correct == None).all()
    work_queue = Queue.Queue()

    for i in range(10):
        t = ThreadWorker(work_queue, db_session)
        t.setDaemon(True)
        t.start()

    for species_obj in species_name_list:
        species_name = species_obj.species_name
        work_queue.put(species_name)

    # wait on the queue until everything has been processed
    work_queue.join()

    return None
Example no. 37
import db
import json

from yikyakapi import yikyak

with open('config.json') as fd:
  config = json.loads(fd.read())

client = yikyak.YikYak()
if 'id' in config:
  client.login_id(config['country'], config['phone'], config['id'])
else:
  pin = input('PIN? ')
  client.login(config['country'], config['phone'], pin)
  print('Your user ID is:', client.yakker.userID)

engine = db.create_engine(config['db'])
session = db.create_session(engine)
Example no. 38
def use(name):
    session = create_session(name)
    genome = config.use_genome(name)
    return session, genome
Example no. 39
def cross_check(work_dir="."):
    session = create_session()
    feature_dir = {}

    base_dir = os.path.join(work_dir, "gadm")
    dir_name_list = [o for o in os.listdir(base_dir) if os.path.isdir(os.path.join(base_dir, o))]

    for dir_name in dir_name_list:
        feature_name = dir_name.split("_")[0]
        shape_file = os.path.join(base_dir, dir_name + "/" + dir_name + "0.shp")
        driver = ogr.GetDriverByName("ESRI Shapefile")
        data_source = driver.Open(shape_file, 0)
        layer = data_source.GetLayer()

        feature = layer[0]
        feature_dir[feature_name] = feature

    occurrence_list = session.query(Occurrence).filter(Occurrence.cross_check == None).all()

    for item in occurrence_list:
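        # cross_check codes: 1 = inside country polygon, 0 = outside, -1 = no shapefile,
        # -2 = unknown country code, -3 = missing country code.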
        oid = item.id
        longitude = item.longitude
        latitude = item.latitude
        country_code = item.country_code
        point = ogr.Geometry(ogr.wkbPoint)
        point.AddPoint(longitude, latitude)

        if country_code is None:
            session.query(Occurrence).filter(Occurrence.id == oid).update(
                {"cross_check": -3}, synchronize_session=False
            )
            session.commit()
            continue
        elif country_code in countries:
            country_data_alpha_3 = countries.get(country_code)[2]
            if country_data_alpha_3 in feature_dir.keys():
                poly = feature_dir[country_data_alpha_3]
                poly = poly.geometry()
            else:
                session.query(Occurrence).filter(Occurrence.id == oid).update(
                    {"cross_check": -1}, synchronize_session=False
                )
                session.commit()
                continue
        else:
            session.query(Occurrence).filter(Occurrence.id == oid).update(
                {"cross_check": -2}, synchronize_session=False
            )
            session.commit()
            continue

        intersection = poly.Intersects(point)

        # print(intersection)
        if intersection:
            session.query(Occurrence).filter(Occurrence.id == oid).update({"cross_check": 1}, synchronize_session=False)
            session.commit()
        else:
            session.query(Occurrence).filter(Occurrence.id == oid).update({"cross_check": 0}, synchronize_session=False)
            session.commit()

    session.close()
    return None
Example no. 40
def handle_select(args):
    conn, path = instantiate_db(True)

    passed_ids = []
    while True:
        owned = True
        if args.buy:
            owned = None
        if args.buy_only:
            owned = False

        games = db.select_random_games(conn, n = args.n, before_this_year = True if args.old else None,
                linux = True if args.linux else None, couch = True if args.couch else None,
                owned = owned, max_passes = args.max_passes,
                exclude_ids = passed_ids, storefront = args.storefront)

        annotate_platforms(conn, games)
        print(rp.format_records(games,
            ['title', 'linux', 'couch', 'platforms', 'via'],
            header = True, nums = True))

        # If we're just displaying a selection, finish here
        if not args.pick:
            break

        print('\nChoose a game to create a new active session for. Input 0 to pass on all games. Q to abort.')
        selection = input("Selection: ")

        if selection == 'q' or selection == 'Q':
            break

        selection = int(selection)

        if selection == 0:
            # Increment the pass counter on each game
            for game in games:
                # Don't propose game again
                passed_ids.append(game['id'])

                # If eternal still undecided 
                # give option to make eternal
                if game['eternal'] is None:
                    eternal = input('Should this game never stop being proposed? Y/N/P[ass]: ')
                    if eternal == 'Y' or eternal == 'y':
                        db.set_eternal(conn, game['id'], 1)
                    elif eternal  == 'N' or eternal == 'n':
                        db.set_eternal(conn, game['id'], 0)


                # If the game is not out yet, don't increment
                if game['release_year'] != None and game['release_year'] != '' and int(game['release_year']) == date.today().year:
                    freebie = input('%s was released this year. Has it been released? Y/N: ' % game['title'])
                    if freebie == 'N' or freebie == 'n':
                        continue
                new_passes = db.inc_pass(conn, game['id'])

                # Delay next possible proposal according to passes
                if new_passes == 1:
                    db.set_next_valid_date(conn, game['id'],
                        date.today() + datetime.timedelta(days = 7))
                elif new_passes == 2:
                    db.set_next_valid_date(conn, game['id'],
                        date.today() + datetime.timedelta(days = 30))
                else:
                    db.set_next_valid_date(conn, game['id'],
                        date.today() + datetime.timedelta(days = 90))
                
        else:
            # Create an active session
            game = games[selection - 1]
            db.create_session(conn, game['id'])
            print('Created a new session of %s.' % game['title'])
            break

        print('\n')

    # So scared of commitment
    db.dump_csvs(conn, path)
Example no. 41
def export_data(target_dir, output_format="csv", country_list=None, output_type=None):
    session = create_session()
    species_list = session.query(Occurrence.species_name).distinct(Occurrence.species_name).filter(
        Occurrence.cross_check == 1).all()
    for species in species_list:
        species_name = species.species_name
        if country_list is None:
            if output_type is None:
                occurrence_data_set = session.query(Occurrence).filter(Occurrence.species_name == species_name).all()
            else:
                occurrence_data_set = session.query(Occurrence).filter(Occurrence.species_name == species_name,
                                                                       Occurrence.cross_check.in_(output_type)).all()

        else:
            if output_type is None:
                occurrence_data_set = session.query(Occurrence).filter(Occurrence.species_name == species_name,
                                                                       Occurrence.country_code.in_(country_list)).all()
            else:
                occurrence_data_set = session.query(Occurrence).filter(Occurrence.species_name == species_name,
                                                                       Occurrence.country_code.in_(country_list),
                                                                       Occurrence.cross_check.in_(output_type)).all()
        if output_format == "csv":
            output_file = os.path.join(target_dir, species_name + ".csv")
            fd = open(output_file, "wb")
            csv_writer = csv.writer(fd)
            for occurrence_point in occurrence_data_set:
                longitude = occurrence_point.longitude
                latitude = occurrence_point.latitude
                csv_writer.writerow([longitude, latitude, occurrence_point.cross_check])
            fd.close()
        elif output_format == "shp":
            output_dir = os.path.join(target_dir, species_name)
            if not os.path.isdir(output_dir):
                os.mkdir(output_dir)

            # create the data source
            output_file = os.path.join(output_dir, "presence.shp")

            driver = ogr.GetDriverByName("ESRI Shapefile")

            data_source = driver.CreateDataSource(output_file)

            # create the spatial reference, WGS84
            srs = osr.SpatialReference()
            srs.ImportFromEPSG(4326)

            # create the layer
            layer = data_source.CreateLayer("main", srs, ogr.wkbPoint)

            # Add the fields we're interested in
            field_name = ogr.FieldDefn("Species", ogr.OFTString)
            field_name.SetWidth(24)
            layer.CreateField(field_name)

            field_name = ogr.FieldDefn("Country", ogr.OFTString)
            field_name.SetWidth(24)
            layer.CreateField(field_name)

            field_name = ogr.FieldDefn("CrossCheck", ogr.OFTInteger)
            layer.CreateField(field_name)

            # multipoint = ogr.Geometry(ogr.wkbMultiPoint)

            for occurrence_point in occurrence_data_set:
                longitude = occurrence_point.longitude
                latitude = occurrence_point.latitude
                # point = (longitude, latitude)
                # point_object = ogr.Geometry(ogr.wkbPoint)
                # point_object.AddPoint(*point)
                # multipoint.AddGeometry(point_object)

                feature = ogr.Feature(layer.GetLayerDefn())
                # Set the attributes using the values from the delimited text file
                print(str(occurrence_point.species_name))
                feature.SetField("Species", str(occurrence_point.species_name))
                feature.SetField("Country", str(occurrence_point.country_code))

                if occurrence_point.cross_check is None:
                    # TODO: In case of cross check is not finished
                    cross_check = -3
                else:
                    cross_check = int(occurrence_point.cross_check)
                feature.SetField("CrossCheck", cross_check)

                # Create the WKT for the feature using python string formatting
                wkt = "POINT(%f %f)" % (float(longitude), float(latitude))

                # Create the point from the Well Know Txt
                point = ogr.CreateGeometryFromWkt(wkt)

                # Set the feature geometry using the point
                feature.SetGeometry(point)
                # Create the feature in the layer (shapefile)
                layer.CreateFeature(feature)
                # Destroy the feature to free resources
                feature.Destroy()

            data_source.Destroy()
Example no. 42
	# Load the configuration defined in a separate file for the production environment.
	prod_config_path = os.environ.get('MSG_PROD_CONFIG', '../etc/matchstreamguide.cfg')
	app.config.from_pyfile(prod_config_path)
else:
	# Load the configuration for the environment that is not production.
	app.config.from_object(_get_qualified_name(_CONFIGURATIONS[environment]))


# Remove some whitespace from the HTML.
app.jinja_env.trim_blocks = app.config['JINJA_TRIM_BLOCKS']
# Compile Coffeescript with the top-level function safety wrapper.
env = Environment(app)
env.config['coffee_no_bare'] = app.config['COFFEE_NO_BARE']
# Compile Sass.
css = Bundle('style.scss', filters=app.config['SCSS_FILTERS'], output='gen/style.css')
env.register('all_css', css)
# Compile CoffeeScript.
app_js = Bundle('app.coffee', filters='coffeescript', output='gen/app.js')
settings_js = Bundle(
		'settings.coffee', filters=app.config['COFFEESCRIPT_FILTERS'], output='gen/settings.js')
env.register('app_js', app_js)
env.register('settings_js', settings_js)

db.create_session(app.config['DATABASE'], app.config['DATABASE_URI'])

if environment != 'test':
	@app.teardown_request
	def shutdown_session(exception=None):
			db.session.remove()