Example #1
def get_campaigns_by_bloodtype():
    session = db.Session()
    user_id = request.args.get('user_id', 0)

    # filter by user Blood Type
    user = session.query(db.User).filter_by(user_id=user_id).first()
    if not user:
        session.close()
        return ApiResponse({
            'status': 'error',
            'message': 'No user with id {0} found'.format(user_id)
        })

    campaigns_blood = session.query(
        db.CampaignBlood).filter_by(blood_type=user.blood_type).all()
    campaigns = [{
        'name': c.campaign.name,
        'hospital': {
            'name': c.campaign.hospital.name,
            'latitude': c.campaign.hospital.latitude,
            'longitude': c.campaign.hospital.longitude,
        },
        'message': c.campaign.message,
        'start_date': to_timestamp(c.campaign.start_date),
        'end_date': to_timestamp(c.campaign.end_date)
    } for c in campaigns_blood]
    session.close()

    # return data
    return ApiResponse({"campaigns": campaigns})
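
Every example on this page calls a project-specific to_timestamp helper that is defined elsewhere in its repository. In the web-API examples it is used to turn a datetime into a numeric Unix timestamp for a JSON response; the sketch below is an assumption only, not code taken from any of the listed projects (signatures also differ between projects, e.g. Example #7 passes a date string plus a format).

import calendar
import datetime


def to_timestamp(dt):
    # Hypothetical helper: convert a naive UTC datetime to integer Unix seconds.
    return calendar.timegm(dt.timetuple())


def to_datetime(ts):
    # Hypothetical inverse, included only to make the sketch self-contained.
    return datetime.datetime.utcfromtimestamp(ts)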
Example #2
 def as_dicts(self, convert_timestamps=False):
     """
     Returns dictionary form of this Tag.
     """
     return {
         "name": self.name,
         "time_type": self.time_type,
         "object_type": self.object_type,
         "synchronization": self.synchronization,
         "end_of_validity": self.end_of_validity,
         "description": self.description,
         "last_validated_time": self.last_validated_time,
         "insertion_time": to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
         "modification_time": to_timestamp(self.modification_time) if convert_timestamps else self.modification_time,
         "record": self.record,
         "label": self.label
     }
Example #3
 def to_array(self):
     return [
         self.queue, self.tag, self.record, self.label,
         status_full_name(self.status),
         to_timestamp(self.time_submitted),
         to_timestamp(self.last_edited)
     ]
Example #4
 def as_dicts(self, convert_timestamps=False):
     """
     Returns dictionary form of Global Tag object.
     """
     json_gt = {
         'name': self.name,
         'validity': self.validity,
         'description': self.description,
         'release': self.release,
         'insertion_time': to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
         'snapshot_time': to_timestamp(self.snapshot_time) if convert_timestamps else self.snapshot_time,
         'scenario': self.scenario,
         'workflow': self.workflow,
         'type': self.type
     }
     return json_gt
Example #5
def get_campaigns_by_bloodtype():
    session = db.Session()
    user_id = request.args.get('user_id', 0)

    # filter by user Blood Type
    user = session.query(db.User).filter_by(user_id=user_id).first()
    if not user:
        session.close()
        return ApiResponse({
            'status': 'error',
            'message': 'No user with id {0} found'.format(user_id)
        })

    campaigns_blood = session.query(db.CampaignBlood).filter_by(blood_type=user.blood_type).all()
    campaigns = [
        {
            'name': c.campaign.name,
            'hospital': {
                'name': c.campaign.hospital.name,
                'latitude': c.campaign.hospital.latitude,
                'longitude': c.campaign.hospital.longitude,
            },
            'message': c.campaign.message,
            'start_date': to_timestamp(c.campaign.start_date),
            'end_date': to_timestamp(c.campaign.end_date)
        } for c in campaigns_blood]
    session.close()

    # return data
    return ApiResponse({
        "campaigns": campaigns
    })
Example #6
def create_campaign():
    session = db.Session()
    data = json.loads(request.data)
    hospital_id = request.args.get('hospital_id', 0)

    # hospital = session.query(db.Hospital).filter_by(_id=hospital_id).first()
    hospital = session.query(db.Hospital).first()

    name = data['name']
    message = data['message']
    bloodtypes = data['bloodtypes']
    start_date = datetime.datetime.now()
    end_date = datetime.datetime.now() + datetime.timedelta(days=10)
    campaign = db.Campaign(hospital._id, name, message, start_date, end_date)
    session.add(campaign)
    session.commit()

    for bloodtype in bloodtypes:
        campaign_blood = db.CampaignBlood(campaign._id, bloodtype)
        session.add(campaign_blood)

    session.commit()

    gcmClient = GCMClient(api_key=os.environ.get('GCM_API_KEY'))
    alert = {
        'subject': 'Fushate e re',
        'message': campaign.hospital.name,
        'data': {
            'id': campaign._id,
            'name': name,
            'hospital': {
                'name': campaign.hospital.name,
                'latitude': campaign.hospital.latitude,
                'longitude': campaign.hospital.longitude,
            },
            'message': message,
            'start_date': to_timestamp(start_date),
            'end_date': to_timestamp(end_date)
        }
    }

    interested_users = session.query(db.User).filter(db.User.blood_type.in_(bloodtypes))
    gcm_id_list = [user.gcm_id for user in interested_users]
    session.close()

    response = gcmClient.send(gcm_id_list, alert, time_to_live=3600)
    if response:
        return ApiResponse({
            'status': 'ok'
        })
    else:
        return ApiResponse({
            'status': 'some error occurred'
        })
Example #7
File: seeker.py Project: srg91/bzseeker
    def output_date(self, date, start, dt_format=None):
        """
        Output lines with the date.
        Output starts from the block at the "start" position and
        ends when the date is no longer found.
        Subsequent blocks are read as needed.

        :param date: string with some date
        :type date: str
        :param start: a start position of a block
        :type start: int
        :param dt_format: format of the date
        :type dt_format: str
        """
        if dt_format:
            self._set_dt_format(dt_format)

        stamp = to_timestamp(date, self.dt_format)
        block = _Range(start, self._get_end_of_block(start))

        rest = self._print_stamp_from_block(block, stamp)
        while rest:
            block_start = block.end
            block_end = self._get_end_of_block(block_start)

            rest = self._print_stamp_from_block(
                _Range(block_start, block_end), stamp, rest)
Example #8
def user_past_donations(user_id=None):
    session = db.Session()

    if user_id is None:
        user_id = request.args.get('user_id', 0)

    user = session.query(db.User).filter_by(user_id=user_id).first()
    if not user:
        session.close()
        return ApiResponse({
            'status': 'error',
            'message': 'No user with id {0} found'.format(user_id)
        })

    donations = session.query(db.UserHistory).filter_by(user_id=user.user_id).all()
    result = {
        'user': user.user_id,
        'history': [{
            'date': to_timestamp(d.donation_date),
            'amount': d.amount,
            'hospital': d.hospital.name
        } for d in donations]
    }
    session.close()
    return ApiResponse({
        'history': result
    })
Example #9
def user_past_donations(user_id=None):
    session = db.Session()

    if user_id is None:
        user_id = request.args.get('user_id', 0)

    user = session.query(db.User).filter_by(user_id=user_id).first()
    if not user:
        session.close()
        return ApiResponse({
            'status': 'error',
            'message': 'No user with id {0} found'.format(user_id)
        })

    donations = session.query(
        db.UserHistory).filter_by(user_id=user.user_id).all()
    result = {
        'user': user.user_id,
        'history': [{
            'date': to_timestamp(d.donation_date),
            'amount': d.amount,
            'hospital': d.hospital.name
        } for d in donations]
    }
    session.close()
    return ApiResponse({'history': result})
Example #10
File: models.py Project: BetterWang/cmssw
 def as_dicts(self, convert_timestamps=False):
     """
     Returns dictionary form of Global Tag object.
     """
     json_gt = {
         'name': self.name,
         'validity': self.validity,
         'description': self.description,
         'release': self.release,
         'insertion_time': to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
         'snapshot_time': to_timestamp(self.snapshot_time) if convert_timestamps else self.snapshot_time,
         'scenario': self.scenario,
         'workflow': self.workflow,
         'type': self.type
     }
     return json_gt
Example #11
def _adjust_timecode(episode, timestamp):
    '''
    Offset a timecode by the total number of offset frames
    '''
    frame = timestamp_to_seconds(timestamp)
    offsets = episode.offsets
    if episode.is_pioneer:
        offsets = episode.pioneer_offsets
    series = episode.series
    total_offset = 0
    # calculate offset from frame data
    if isinstance(offsets, list):
        # for list-types (movies, not episodes), start with 0 offset
        for o in offsets:
            if frame > frame_to_seconds(o['frame']):
                total_offset += frame_to_seconds(o['offset'])
    else:
        # episodes are map-based, with a key for each chapter
        # orange bricks have a delay on the OP subs
        if (series == 'DBZ' and not episode.is_r1dbox and
           frame < frame_to_seconds(offsets['prologue']["frame"])):
            total_offset += _op_subtitle_delay(episode)
        for key in offsets.keys():
            # also account for ED subs being +0.333 s early
            if frame > frame_to_seconds(offsets[key]["frame"]):
                total_offset += frame_to_seconds(
                    offsets[key]["offset"])
    # apply offset to subtitle timing
    frame -= total_offset

    return to_timestamp(frame)
Example #12
	def send_blob(self, payload, upload_session_id):
		"""
		Send the BLOB of a payload over HTTP.
		The BLOB is put in the request body, so no additional processing has to be done on the server side, apart from decoding from base64.
		"""
		# encode the BLOB data of the Payload to make sure we don't send a character that will influence the HTTPs request
		blob_data = base64.b64encode(payload["data"])

		url_data = {"database" : self.data_to_send["destinationDatabase"], "upload_session_id" : upload_session_id}

		# construct the data to send in the body and header of the HTTPs request
		for key in payload.keys():
			# skip blob
			if key != "data":
				if key == "insertion_time":
					url_data[key] = to_timestamp(payload[key])
				else:
					url_data[key] = payload[key]

		request = url_query(url=self._SERVICE_URL + "store_payload/", url_data=url_data, body=blob_data)

		# send the request and return the response
		# Note - the url_query module will handle retries, and will throw a NoMoreRetriesException if it runs out
		try:
			request_response = request.send()
			return request_response
		except Exception as e:
			# make sure we don't try again - if a NoMoreRetriesException has been thrown, retries have run out
			if isinstance(e, errors.NoMoreRetriesException):
				self._outputter.write("\t\t\tPayload with hash '%s' was not uploaded because the maximum number of retries was exceeded." % payload["hash"])
				self._outputter.write("Payload with hash '%s' was not uploaded because the maximum number of retries was exceeded." % payload["hash"])
			return json.dumps({"error" : str(e), "traceback" : traceback.format_exc()})
Example #13
 def add_satz(self, fullname, result, date):
     s = self.get_schuetze_by_fullname(fullname)
     entry = JSONSatz(schuetze_uuid=s.uuid,
                      result=result,
                      date=utils.to_timestamp(date))
     self.data.append(entry)
     self._generic_add_data(entry, self.settings.data_file)
     return entry
Example #14
File: evo.py Project: Yegor-V/flask
    def post():
        """
        Creates a new db instance of Employee. vacancy_id, name and surname args are needed.
        Other info (email, phone, birth_date, start_work_date and is_department_leader) is optional.
        If start_work_date is not set -> will be set to now.
        If is_department_leader is not set -> will be set to False.
        :return: success/error json
        """
        vacancy_id = request.form.get('vacancy_id')
        name = request.form.get('name')
        surname = request.form.get('surname')
        email = request.form.get('email')
        phone = request.form.get('phone')
        birth_date = to_timestamp(request.form.get('birth_date'))
        start_work_date = to_timestamp(request.form.get('start_work_date'))
        is_department_leader = request.form.get('is_department_leader')

        if not all((vacancy_id, name, surname)):
            return {'error': 'vacancy_id, name, surname required'}, 400
        else:
            try:
                vacancy = Vacancy.query.filter_by(id=vacancy_id).first()
                employee = Employee(position_id=vacancy.position.id,
                                    department_id=vacancy.department.id,
                                    name=name,
                                    surname=surname,
                                    email=email,
                                    phone=phone,
                                    birth_date=birth_date,
                                    start_work_date=start_work_date,
                                    is_department_leader=is_department_leader)
                db.session.add(employee)
                db.session.commit()
                return {
                    'success': 'employee created',
                    'employee': {
                        'id': employee.id,
                        'name': employee.name,
                        'surname': employee.surname,
                        'position': employee.position.name
                    }
                }
            except Exception as e:
                return {
                    'error': 'failed to create employee: {}'.format(e)
                }, 400
Example #15
File: models.py Project: BetterWang/cmssw
 def as_dicts(self, convert_timestamps=False):
     """
     Returns dictionary form of this Tag.
     """
     return {
         "name" : self.name,
         "time_type" : self.time_type,
         "object_type" : self.object_type,
         "synchronization" : self.synchronization,
         "end_of_validity" : self.end_of_validity,
         "description" : self.description,
         "last_validated_time" : self.last_validated_time,
         "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
         "modification_time" : to_timestamp(self.modification_time) if convert_timestamps else self.modification_time,
         "record" : self.record,
         "label" : self.label
     }
Example #16
File: json_model.py Project: hikhvar/Stats
 def add_satz(self, fullname, result, date):
     s = self.get_schuetze_by_fullname(fullname)
     entry = JSONSatz(
         schuetze_uuid=s.uuid,
         result=result,
         date=utils.to_timestamp(date))
     self.data.append(entry)
     self._generic_add_data(entry, self.settings.data_file)
     return entry
Example #17
File: models.py Project: BetterWang/cmssw
 def as_dicts(self, convert_timestamps=False):
     """
     Returns dictionary form of this IOV.
     """
     return {
         "tag_name" : self.tag_name,
         "since" : self.since,
         "payload_hash" : self.payload_hash,
         "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
     }
Example #18
 def __init__(self, dictionary={}, convert_timestamps=True):
     # assign each entry in a kwargs
     for key in dictionary:
         try:
             if convert_timestamps:
                 self.__dict__[key] = to_timestamp(dictionary[key])
             else:
                 self.__dict__[key] = dictionary[key]
         except KeyError as k:
             continue
Example #19
File: models.py Project: BetterWang/cmssw
 def as_dicts(self, convert_timestamps=False):
     """
     Returns dictionary form of this Payload's metadata (not the actual Payload).
     """
     return {
         "hash" : self.hash,
         "object_type" : self.object_type,
         "version" : self.version,
         "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
     }
Example #20
 def as_dicts(self, convert_timestamps=False):
     """
     Returns dictionary form of this Payload's metadata (not the actual Payload).
     """
     return {
         "hash" : self.hash,
         "object_type" : self.object_type,
         "version" : self.version,
         "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
     }
Example #21
File: models.py Project: BetterWang/cmssw
 def __init__(self, dictionary={}, convert_timestamps=True):
     # assign each entry in a kwargs
     for key in dictionary:
         try:
             if convert_timestamps:
                 self.__dict__[key] = to_timestamp(dictionary[key])
             else:
                 self.__dict__[key] = dictionary[key]
         except KeyError as k:
             continue
Example #22
 def as_dicts(self, convert_timestamps=False):
     """
     Returns dictionary form of this IOV.
     """
     return {
         "tag_name" : self.tag_name,
         "since" : self.since,
         "payload_hash" : self.payload_hash,
         "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
     }
Example #23
    def save_progress(self, last_block, last_timestamp, last_hash):
        self.conn.execute("DELETE FROM last_block")
        self.conn.execute(
            "INSERT INTO last_block VALUES (?, ?, ?) ",
            (last_block, to_timestamp(last_timestamp), last_hash))

        self.conn.execute("DELETE FROM schema_version")
        self.conn.execute("INSERT INTO schema_version VALUES (?) ",
                          (Logger.SCHEMA_VERSION, ))

        self.conn.commit()
Example #24
 def safe_handler(*args, **kwargs):
     session_token = request.args.get('session_token', '')
     user_id = request.args.get('user_id', 0)
     user = session.query(User).filter_by(user_id=user_id).first()
     sleep_amount = random.random()/5
     if user and utils.str_equal(user.session_token, session_token) and \
         utils.to_timestamp(user.session_token_expires_at) > time.time():
         return handler(*args, **kwargs)
     else:
         time.sleep(sleep_amount)
         return ApiResponse(config.ACCESS_DENIED_MSG, status='403')
Example #25
def all_campaigns():
    session = db.Session()
    hospital_id = request.args.get('hospital_id', 0)
    #campaigns = session.query(db.Campaign).filter_by(hospital_id=hospital_id).all()
    campaigns = session.query(db.Campaign).all()

    bloodtypes = session.query()
    response = ApiResponse({
        'campaigns': [
            {
                'id': c._id,
                'name': c.name,
                'message': c.message,
                'start_date': to_timestamp(c.start_date),
                'end_date': to_timestamp(c.end_date),
                'active': c.active,
                'bloodtypes': [r.blood_type for r in c.requirement]
            } for c in campaigns]
    })
    session.close()
    return response
Example #26
File: evo.py Project: Yegor-V/flask
    def post():
        """
        Opens vacancy in current department. department_id and position_id are needed.
        date_opened is optional (defaults to now)
        :return: success/error json
        """
        department_id = request.form.get('department_id')
        position_id = request.form.get('position_id')
        date_opened = request.form.get('date_opened')

        if not position_id or not department_id:
            return {'error': 'position_id and department_id required'}, 400
        else:
            try:
                department = Department.query.filter_by(
                    id=department_id).first()
                position = Position.query.filter_by(id=position_id).first()
            except DataError:
                return {
                    'error': 'department_id and position_id must be ints'
                }, 400

            if not department or not position:
                return {'error': 'department or position not found'}, 404

            else:
                if date_opened:
                    try:
                        date_opened = to_timestamp(date_opened)
                        vacancy = Vacancy(department_id=department_id,
                                          position_id=position_id,
                                          date_opened=date_opened)
                    except ValueError:
                        return {
                            'error': 'date_opened must be in format mm/dd/yyyy'
                        }, 400
                else:
                    vacancy = Vacancy(department_id=department_id,
                                      position_id=position_id)
                db.session.add(vacancy)
                db.session.commit()
                return {
                    'success': 'vacancy created',
                    'vacancy': {
                        'id': vacancy.id,
                        'department_id': vacancy.department_id,
                        'date_opened': vacancy.str_date_opened,
                        'position': {
                            'id': vacancy.position.id,
                            'name': vacancy.position.name
                        }
                    }
                }
Example #27
 def safe_handler(*args, **kwargs):
     session = db.Session()
     session_token = request.args.get('session_token', '')
     user_id = request.args.get('user_id', 0)
     user = session.query(db.User).filter_by(user_id=user_id).first()
     if user and utils.str_equal(user.session_token, session_token) and \
         utils.to_timestamp(user.session_token_expires_at) > time.time():
         response = handler(*args, **kwargs)
     else:
         response = ApiResponse(config.ACCESS_DENIED_MSG, status='403')
     session.close()
     return response
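
Examples #24 and #27 show only the inner closure of what appears to be an authentication decorator; the enclosing function that supplies handler is not part of the listing. A hedged sketch of how such a wrapper might be declared and applied to a Flask view follows; the require_session name, the app object and the route are illustrative assumptions, not code from the project.

from functools import wraps

from flask import Flask

app = Flask(__name__)  # assumed application object


def require_session(handler):
    # Hypothetical outer decorator that supplies `handler` to a closure like
    # the safe_handler shown in Examples #24 and #27.
    @wraps(handler)
    def safe_handler(*args, **kwargs):
        # ... session-token and expiry check as in the examples above ...
        return handler(*args, **kwargs)
    return safe_handler


@app.route('/ping')
@require_session
def ping():
    return 'ok'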
Example #28
File: evo.py Project: Yegor-V/flask
    def patch():
        """
        Updates vacancy data.
        :return: success/error json
        """
        vacancy_id = request.form.get('vacancy_id')
        new_department_id = request.form.get('new_department_id')
        new_position_id = request.form.get('new_position_id')
        new_date_opened = request.form.get('new_date_opened')

        if not vacancy_id or not any(
            (new_department_id, new_position_id, new_date_opened)):
            return {
                'error': 'vacancy_id and at least one of new_department_id, new_position_id, new_date_opened needed'
            }, 400
        else:
            try:
                vacancy = Vacancy.query.filter_by(id=vacancy_id).first()
                if not vacancy:
                    return {'error': 'vacancy not found'}, 404
                if new_department_id:
                    vacancy.department_id = new_department_id
                if new_position_id:
                    vacancy.position_id = new_position_id
                if new_date_opened:
                    try:
                        vacancy.date_opened = to_timestamp(new_date_opened)
                    except ValueError:
                        return {
                            'error': 'date_opened must be in format mm/dd/yyyy'
                        }, 400
                db.session.add(vacancy)
                db.session.commit()
                return {
                    'success': 'vacancy updated',
                    'vacancy': {
                        'id': vacancy.id,
                        'date_opened': vacancy.date_opened,
                        'department': {
                            'id': vacancy.department_id,
                            'name': vacancy.department.name
                        },
                        'position': {
                            'id': vacancy.position_id,
                            'name': vacancy.position.name
                        }
                    }
                }
            except (DataError, IntegrityError):
                return {'error': 'bad department_id, position_id, vacancy_id'}
Example #29
def login():
    data = json.loads(request.data)
    user_id = data['user_id']
    gcmID = data['gcmID']
    fb_token = data['fb_token']

    payload = {'access_token': fb_token, 'fields': 'id'}
    fb_response = requests.get(config.FB_ENDPOINT, params=payload).json()
    if 'error' in fb_response:
        return ApiResponse(config.ACCESS_DENIED_MSG, status='403')
    elif user_id != fb_response['id']:
        return ApiResponse(config.ACCESS_DENIED_MSG, status='403')

    # Facebook login was successful
    user = session.query(User).filter_by(user_id=user_id).first()
    gcm_id = request.args.get('gcm_id', '')
    blood_type = request.args.get('blood_type', '')

    if user:
        user.fb_token = fb_token
        token, expires_at = User.generate_session_token()
        user.session_token = token
        user.session_token_expires_at = expires_at
        if gcm_id:
            user.gcm_id = gcm_id
        if blood_type:
            user.blood_type = blood_type
        session.commit()
    else:
        user = User(user_id,
                    fb_token=fb_token,
                    gcm_id=gcm_id,
                    blood_type=blood_type)
        session.add(user)
        session.commit()

    if user:
        return ApiResponse({
            'status': 'OK',
            'session_token': user.session_token,
            'expires_at': to_timestamp(user.session_token_expires_at)
        })
    else:
        return ApiResponse({
            'status': 'Failed',
            'message': "Couldn't create new user"
        })
Example #30
def dashboard():
    ops = db_fieldbook.get_all_opportunities()
    for op in ops:
        if op["status"] == "Accepted":
            op["class"] = "success"
        elif op["status"] == "Offered":
            op["class"] = "info"
        elif op["status"] == "Expired":
            op["class"] = "active"
        elif op["status"] == "Attended":
            op["class"] = "active"
        elif op["status"] == "Not Attended":
            op["class"] = "active"
        op["remaining_mins"] = int(int(op["expiry_time"] - utils.to_timestamp(datetime.datetime.utcnow())) / 60)
    return render_template('dashboard.html', ops=ops, dash_refresh_timeout=config.dash_refresh_timeout)
Example #31
def login():
    data = json.loads(request.data)
    user_id = data['user_id']
    gcmID = data['gcmID']
    fb_token = data['fb_token']

    payload = {
        'access_token': fb_token,
        'fields': 'id'
    }
    fb_response = requests.get(config.FB_ENDPOINT, params=payload).json()
    if 'error' in fb_response:
        return ApiResponse(config.ACCESS_DENIED_MSG, status='403')
    elif user_id != fb_response['id']:
        return ApiResponse(config.ACCESS_DENIED_MSG, status='403')

    # Facebook login was successful
    user = session.query(User).filter_by(user_id=user_id).first()
    gcm_id = request.args.get('gcm_id', '')
    blood_type = request.args.get('blood_type', '')

    if user:
        user.fb_token = fb_token
        token, expires_at = User.generate_session_token()
        user.session_token = token
        user.session_token_expires_at = expires_at
        if gcm_id:
            user.gcm_id = gcm_id
        if blood_type:
            user.blood_type = blood_type
        session.commit()
    else:
        user = User(user_id, fb_token=fb_token, gcm_id=gcm_id,
                    blood_type=blood_type)
        session.add(user)
        session.commit()

    if user:
        return ApiResponse({
            'status': 'OK',
            'session_token': user.session_token,
            'expires_at': to_timestamp(user.session_token_expires_at)
        })
    else:
        return ApiResponse({
            'status': 'Failed',
            'message': "Couldn't create new user"
        })
Example #32
def login():
    session = db.Session()
    data = json.loads(request.data)
    user_id = data['user_id']
    gcm_id = data['gcm_id']
    fb_token = data['fb_token']

    payload = {'access_token': fb_token, 'fields': ['id', 'name']}
    fb_response = requests.get(config.FB_ENDPOINT, params=payload).json()
    if 'error' in fb_response:
        return ApiResponse(config.ACCESS_DENIED_MSG, status='403')
    elif user_id != fb_response['id']:
        return ApiResponse(config.ACCESS_DENIED_MSG, status='403')

    # Facebook login was successful
    user = session.query(db.User).filter_by(user_id=user_id).first()

    if user:
        user.fb_token = fb_token
        token, expires_at = db.User.generate_session_token()
        user.session_token = token
        user.session_token_expires_at = expires_at
        if gcm_id:
            user.gcm_id = gcm_id
    else:
        name = fb_response['name'].split()
        user = db.User(user_id,
                       name[0],
                       name[-1],
                       fb_token=fb_token,
                       gcm_id=gcm_id)
        #blood_type=blood_type)
        session.add(user)
    session.commit()

    response = ApiResponse(
        {
            'status': 'OK',
            'session_token': user.session_token,
            'expires_at': to_timestamp(user.session_token_expires_at)
        } if user else {
            'status': 'Failed',
            'message': "Couldn't create new user"
        })
    session.close()
    return response
Example #33
def login():
    session = db.Session()
    data = json.loads(request.data)
    user_id = data['user_id']
    gcm_id = data['gcm_id']
    fb_token = data['fb_token']

    payload = {
        'access_token': fb_token,
        'fields': ['id', 'name']
    }
    fb_response = requests.get(config.FB_ENDPOINT, params=payload).json()
    if 'error' in fb_response:
        return ApiResponse(config.ACCESS_DENIED_MSG, status='403')
    elif user_id != fb_response['id']:
        return ApiResponse(config.ACCESS_DENIED_MSG, status='403')

    # Facebook login was successful
    user = session.query(db.User).filter_by(user_id=user_id).first()

    if user:
        user.fb_token = fb_token
        token, expires_at = db.User.generate_session_token()
        user.session_token = token
        user.session_token_expires_at = expires_at
        if gcm_id:
            user.gcm_id = gcm_id
    else:
        name = fb_response['name'].split()
        user = db.User(user_id, name[0], name[-1], fb_token=fb_token, gcm_id=gcm_id)
                    #blood_type=blood_type)
        session.add(user)
    session.commit()

    response = ApiResponse({
        'status': 'OK',
        'session_token': user.session_token,
        'expires_at': to_timestamp(user.session_token_expires_at)
    } if user else {
        'status': 'Failed',
        'message': "Couldn't create new user"
    })
    session.close()
    return response
Example #34
    def send_blob(self, payload, upload_session_id):
        """
		Send the BLOB of a payload over HTTP.
		The BLOB is put in the request body, so no additional processing has to be done on the server side, apart from decoding from base64.
		"""
        # encode the BLOB data of the Payload to make sure we don't send a character that will influence the HTTPs request
        blob_data = base64.b64encode(payload["data"])

        url_data = {
            "database": self.data_to_send["destinationDatabase"],
            "upload_session_id": upload_session_id
        }

        # construct the data to send in the body and header of the HTTPs request
        for key in payload.keys():
            # skip blob
            if key != "data":
                if key == "insertion_time":
                    url_data[key] = to_timestamp(payload[key])
                else:
                    url_data[key] = payload[key]

        request = url_query(url=self._SERVICE_URL + "store_payload/",
                            url_data=url_data,
                            body=blob_data)

        # send the request and return the response
        # Note - the url_query module will handle retries, and will throw a NoMoreRetriesException if it runs out
        try:
            request_response = request.send()
            return request_response
        except Exception as e:
            # make sure we don't try again - if a NoMoreRetriesException has been thrown, retries have run out
            if type(e) == errors.NoMoreRetriesException:
                self._outputter.write(
                    "\t\t\tPayload with hash '%s' was not uploaded because the maximum number of retries was exceeded."
                    % payload["hash"])
                self._outputter.write(
                    "Payload with hash '%s' was not uploaded because the maximum number of retries was exceeded."
                    % payload["hash"])
            return json.dumps({
                "error": str(e),
                "traceback": traceback.format_exc()
            })
Example #35
File: seeker.py Project: srg91/bzseeker
    def seek(self, date, dt_format=None):
        """
        Search the start position of date in a bzipped log file.

        :param date: string with some date
        :type date: str
        :return: start position of the date
        """
        if dt_format:
            self._set_dt_format(dt_format)

        stamp = to_timestamp(date, self.dt_format)
        block = self._get_block_with_date(stamp)
        if not block:
            return

        if block.start == block.end:
            block.end = self._get_end_of_block(block.end)

        return block
Example #36
File: models.py Project: BetterWang/cmssw
 def to_array(self):
     return [self.hash, self.object_type, self.version, to_timestamp(self.insertion_time)]
Example #37
    def __init__(self,
                 metadata_source=None,
                 debug=False,
                 verbose=False,
                 testing=False,
                 server="https://cms-conddb-dev.cern.ch/cmsDbCondUpload/",
                 **kwargs):
        """
		Upload constructor:
		Given an SQLite file and a Metadata source, reads the data into a dictionary ready to be encoded and uploaded.

		Note: kwargs is used to capture stray arguments - arguments that do not match keywords will not be used.

		Note: default value of service_url should be changed for production.
		"""
        # set private variables
        self._debug = debug
        self._verbose = verbose
        self._testing = testing
        # initialise server-side log data as empty string - will be replaced when we get a response back from the server
        self._log_data = ""
        self._SERVICE_URL = server
        self.upload_session_id = None

        # set up client-side log file
        self.upload_log_file_name = "upload_logs/upload_log_%d" % new_log_file_id(
        )
        self._handle = open(self.upload_log_file_name, "a")

        # set up client-side logging object
        self._outputter = output(verbose=verbose, log_handle=self._handle)
        self._outputter.write("Using server instance at '%s'." %
                              self._SERVICE_URL)

        # expect a CondDBFW data_source object for metadata_source
        if metadata_source == None:
            # no upload metadata has been given - we cannot continue with the upload
            self.exit_upload(
                "A source of metadata must be given so CondDBFW knows how to upload conditions."
            )
        else:
            # set up global metadata source variable
            self.metadata_source = metadata_source.data()

        # check for the destination tag
        # this is required whatever type of upload we're performing
        if self.metadata_source.get("destinationTags") == None:
            self.exit_upload("No destination Tag was given.")
        else:
            if type(self.metadata_source.get(
                    "destinationTags")) == dict and self.metadata_source.get(
                        "destinationTags").keys()[0] == None:
                self.exit_upload("No destination Tag was given.")

        # make sure a destination database was given
        if self.metadata_source.get("destinationDatabase") == None:
            self.exit_upload("No destination database was given.")

        # get Conditions metadata
        if self.metadata_source.get(
                "sourceDB") == None and self.metadata_source.get(
                    "hashToUse") == None:
            """
			If we have neither an sqlite file nor the command line data
			"""
            self.exit_upload("You must give either an SQLite database file, or the necessary command line arguments to replace one."\
                + "\nSee --help for command line argument information.")
        elif self.metadata_source.get("sourceDB") != None:
            """
			We've been given an SQLite file, so try to extract Conditions Metadata based on that and the Upload Metadata in metadata_source
			We now extract the Tag and IOV data from SQLite.  It is added to the dictionary for sending over HTTPs later.
			"""

            # make sure we have an input tag to look for in the source db
            self.input_tag = metadata_source.data().get("inputTag")
            if self.input_tag == None:
                self.exit_upload("No input Tag name was given.")

            # set empty dictionary to contain Tag and IOV data from SQLite
            result_dictionary = {}
            self.sqlite_file_name = self.metadata_source["sourceDB"]
            if not (os.path.isfile(self.sqlite_file_name)):
                self.exit_upload("SQLite file '%s' given doesn't exist." %
                                 self.sqlite_file_name)
            sqlite_con = querying.connect(
                "sqlite://%s" % os.path.abspath(self.sqlite_file_name))

            self._outputter.write("Getting Tag and IOVs from SQLite database.")

            # query for Tag, check for existence, then convert to dictionary
            tag = sqlite_con.tag(name=self.input_tag)
            if tag == None:
                self.exit_upload(
                    "The source Tag '%s' you gave was not found in the SQLite file."
                    % self.input_tag)
            tag = tag.as_dicts(convert_timestamps=True)

            # query for IOVs, check for existence, then convert to dictionaries
            iovs = sqlite_con.iov(tag_name=self.input_tag)
            if iovs == None:
                self.exit_upload(
                    "No IOVs found in the SQLite file given for Tag '%s'." %
                    self.input_tag)
            iovs = iovs.as_dicts(convert_timestamps=True)
            iovs = [iovs] if type(iovs) != list else iovs
            """
			Finally, get the list of all Payload hashes of IOVs,
			then compute the list of hashes for which there is no Payload for
			this is used later to decide if we can continue the upload if the Payload was not found on the server.
			"""
            iovs_for_hashes = sqlite_con.iov(tag_name=self.input_tag)
            if iovs_for_hashes.__class__ == data_sources.json_list:
                hashes_of_iovs = iovs_for_hashes.get_members(
                    "payload_hash").data()
            else:
                hashes_of_iovs = [iovs_for_hashes.payload_hash]
            self.hashes_with_no_local_payload = [
                payload_hash for payload_hash in hashes_of_iovs
                if sqlite_con.payload(hash=payload_hash) == None
            ]

            # close session open on SQLite database file
            sqlite_con.close_session()

        elif metadata_source.data().get("hashToUse") != None:
            """
			Assume we've been given metadata in the command line (since no sqlite file is there, and we have command line arguments).
			We now use Tag and IOV data from command line.  It is added to the dictionary for sending over HTTPs later.
			"""

            # set empty dictionary to contain Tag and IOV data from command line
            result_dictionary = {}

            now = to_timestamp(datetime.now())
            # tag dictionary will be taken from the server
            # this does not require any authentication
            tag = self.get_tag_dictionary()
            self.check_response_for_error_key(tag)
            iovs = [{"tag_name" : self.metadata_source["destinationTag"], "since" : self.metadata_source["since"], "payload_hash" : self.metadata_source["hashToUse"],\
              "insertion_time" : now}]

            # hashToUse cannot be stored locally (no sqlite file is given), so register it as not found
            self.hashes_with_no_local_payload = [
                self.metadata_source["hashToUse"]
            ]

            # Note: normal optimisations will still take place - since the hash checking stage can tell if hashToUse does not exist on the server side

        # if the source Tag is run-based, convert sinces to lumi-based sinces with lumi-section = 0
        if tag["time_type"] == "Run":
            for (i, iov) in enumerate(iovs):
                iovs[i]["since"] = iovs[i]["since"] << 32

        result_dictionary = {"inputTagData": tag, "iovs": iovs}

        # add command line arguments to dictionary
        # remembering that metadata_source is a json_dict object
        result_dictionary.update(metadata_source.data())

        # store in instance variable
        self.data_to_send = result_dictionary

        # if the since doesn't exist, take the first since from the list of IOVs
        if result_dictionary.get("since") == None:
            result_dictionary["since"] = sorted(
                iovs, key=lambda iov: iov["since"])[0]["since"]
        elif self.data_to_send["inputTagData"]["time_type"] == "Run":
            # Tag time_type says IOVs use Runs for sinces, so we convert to Lumi-based for uniform processing
            self.data_to_send["since"] = self.data_to_send["since"] << 32
        """
		TODO - Settle on a single destination tag format.
		"""
        # look for deprecated metadata entries - give warnings
        # Note - we only really support this format
        try:
            if type(result_dictionary["destinationTags"]) == dict:
                self._outputter.write(
                    "WARNING: Multiple destination tags in a single metadata source is deprecated."
                )
        except Exception as e:
            self._outputter.write("ERROR: %s" % str(e))
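
Example #37 converts run-based sinces to lumi-based sinces by shifting left 32 bits ("lumi-section = 0" per the comment in the code); the run number then occupies the upper 32 bits of the 64-bit since. A short illustration with a made-up run number:

run = 316995                # illustrative run number, not from the source
since = run << 32           # lumi-based since: run in the upper 32 bits, lumi section 0
assert since >> 32 == run   # shifting back recovers the run number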
Example #38
File: models.py Project: BetterWang/cmssw
 def to_array(self):
     return [self.since, to_timestamp(self.insertion_time), self.payload_hash]
Example #39
File: json_model.py Project: hikhvar/Stats
 def add_event(self, date, description):
     entry = JSONEvent(
         date=utils.to_timestamp(date),
         description=description)
     self.events.append(entry)
     self._generic_add_data(entry, self.settings.event_file)
Example #40
 def insert_fee(self, block_height, block_time, amount):
     self.conn.execute("INSERT INTO fees VALUES (?, ?, ?)",
                       (block_height, to_timestamp(block_time), amount))
Example #41
    def __init__(self, database, bitcoin_rpc, liquid_rpc):
        #Initialize Database if not created
        self.conn = sqlite3.connect(database)
        self.conn.execute(
            '''CREATE TABLE if not exists schema_version (version int)''')

        self.bitcoin_rpc = bitcoin_rpc
        self.liquid_rpc = liquid_rpc

        schema_version = self.conn.execute(
            "SELECT version FROM schema_version").fetchall()
        if len(schema_version) == 0:
            self.conn.execute(
                '''CREATE TABLE if not exists missing_blocks (datetime int, functionary int)'''
            )
            self.conn.execute(
                '''CREATE TABLE if not exists fees (block int, datetime int, amount int)'''
            )
            self.conn.execute(
                '''CREATE TABLE if not exists outages (end_time int, length int)'''
            )
            self.conn.execute(
                '''CREATE TABLE if not exists pegs (block int, datetime int, amount int, txid string, txindex int)'''
            )
            self.conn.execute(
                '''CREATE TABLE if not exists issuances (block int, datetime int, asset text, amount int NULL, txid string, txindex int, token string NULL, tokenamount int NULL)'''
            )
            self.conn.execute(
                '''CREATE TABLE if not exists last_block (block int, datetime int, block_hash string)'''
            )
            self.conn.execute(
                '''CREATE TABLE if not exists wallet (txid string, txindex int, amount int, block_hash string, block_timestamp string, spent_txid string NULL, spent_index int NULL)'''
            )
            self.reindex()
        else:
            if schema_version[0][0] < 2:
                self.conn.execute('DROP TABLE issuances')
                self.conn.execute('DROP TABLE pegs')
                self.conn.execute(
                    '''CREATE TABLE if not exists pegs (block int, datetime int, amount int, txid string, txindex int)'''
                )
                self.conn.execute(
                    '''CREATE TABLE if not exists issuances (block int, datetime int, asset text, amount int NULL, txid string, txindex int)'''
                )
            if schema_version[0][0] < 3:
                self.conn.execute('DROP TABLE issuances')
                self.conn.execute(
                    '''CREATE TABLE if not exists issuances (block int, datetime int, asset text, amount int NULL, txid string, txindex int, token string NULL, tokenamount int NULL)'''
                )

            if schema_version[0][0] < 4:
                self.conn.execute("DROP TABLE last_block")
                self.conn.execute(
                    '''CREATE TABLE if not exists last_block (block int, datetime int, block_hash string)'''
                )
            if schema_version[0][0] < 5:
                self.conn.execute(
                    '''CREATE TABLE if not exists wallet (txid string, txindex int, amount int, block_hash string, block_timestamp string, spent_txid string NULL, spent_index int NULL)'''
                )
                self.reindex()
            if schema_version[0][0] == 5 or schema_version[0][0] == 6:
                self.reindex()
            else:
                configuration = self.conn.execute(
                    "SELECT block, datetime, block_hash FROM last_block"
                ).fetchall()
                should_reindex = False
                if len(configuration) == 0:
                    should_reindex = True
                else:
                    self.last_time = datetime.fromtimestamp(
                        configuration[0][1])
                    self.last_block = configuration[0][0]
                    self.conn.execute(
                        '''DELETE FROM missing_blocks WHERE datetime >= ? ''',
                        (to_timestamp(self.last_time), ))
                    self.conn.execute(
                        '''DELETE FROM fees WHERE datetime >= ? ''',
                        (to_timestamp(self.last_time), ))
                    self.conn.execute(
                        '''DELETE FROM outages WHERE end_time >= ? ''',
                        (to_timestamp(self.last_time), ))
                    self.conn.execute(
                        '''DELETE FROM pegs WHERE datetime >= ? ''',
                        (to_timestamp(self.last_time), ))
                    self.conn.execute(
                        '''DELETE FROM issuances WHERE datetime >= ? ''',
                        (to_timestamp(self.last_time), ))
                    self.block_hash = configuration[0][2]

                    # Reindex if block hash doesn't check out
                    should_reindex = \
                        self.last_block is not None and self.block_hash is not None and \
                        self.liquid_rpc.getblockhash(self.last_block) != self.block_hash
                if should_reindex:
                    self.reindex()
            self.conn.commit()
Example #42
 def insert_peg(self, block_height, block_time, amount, txid, txindex):
     self.conn.execute(
         "INSERT INTO pegs VALUES (?, ?, ?, ? , ?)",
         (block_height, to_timestamp(block_time), amount, txid, txindex))
Example #43
 def insert_issuance(self, block_height, block_time, asset_id, amount, txid,
                     txindex, token, tokenamount):
     self.conn.execute(
         "INSERT INTO issuances VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
         (block_height, to_timestamp(block_time), asset_id, amount, txid,
          txindex, token, tokenamount))
Example #44
def log(file_handle, message):
	"""
	Very simple logging function, used by output class.
	"""
	file_handle.write("[%s] %s\n" % (to_timestamp(datetime.now()), message))
Example #45
def log(file_handle, message):
    """
	Very simple logging function, used by output class.
	"""
    file_handle.write("[%s] %s\n" % (to_timestamp(datetime.now()), message))
Example #46
    def filter_iovs_by_fcsr(self, upload_session_id):
        """
		Ask the server for the FCSR based on the synchronization type of the source Tag.
		Then, modify the IOVs (possibly remove some) based on the FCSR we received.
		This is useful in the case that most IOVs have different payloads, and our FCSR is close to the end of the range the IOVs cover.
		"""
        self._outputter.write(
            "Getting the First Condition Safe Run for the current sync type.")

        fcsr_data = self.get_fcsr_from_server(upload_session_id)
        fcsr = fcsr_data["fcsr"]
        fcsr_changed = fcsr_data["fcsr_changed"]
        new_sync = fcsr_data["new_sync"]

        if fcsr_changed:
            self._outputter.write(
                "Synchronization '%s' given was changed to '%s' to match destination Tag."
                % (self.data_to_send["fcsr_filter"], new_sync))

        self._outputter.write("Synchronization '%s' gave FCSR %d for FCSR Filtering."\
             % (self.data_to_send["fcsr_filter"], friendly_since(self.data_to_send["inputTagData"]["time_type"], fcsr)))
        """
		There may be cases where this assumption is not correct (that we can reassign since if fcsr > since)
		Only set since to fcsr from server if the fcsr is further along than the user is trying to upload to
		Note: this applies to run, lumi and timestamp run_types.
		"""

        # if the fcsr is above the since given by the user, we need to set the user since to the fcsr
        if fcsr > self.data_to_send["since"]:
            # check if we're uploading to offline sync - if so, then user since must be >= fcsr, so we should report an error
            if self.data_to_send["fcsr_filter"].lower() == "offline":
                self._outputter.write(
                    "If you're uploading to offline, you can't upload to a since < FCSR.\nNo upload has been processed."
                )
                self.exit_upload()
            self.data_to_send["since"] = fcsr

        self._outputter.write("Final FCSR after comparison with FCSR received from server is %d."\
              % friendly_since(self.data_to_send["inputTagData"]["time_type"], int(self.data_to_send["since"])))
        """
		Post validation processing assuming destination since is now valid.

		Because we don't have an sqlite database to query (everything's in a dictionary),
		we have to go through the IOVs manually to find the greatest since that's less than
		the destination since.

		Purpose of this algorithm: move any IOV sinces that we can use up to the fcsr without leaving a hole in the Conditions coverage
		"""

        max_since_below_dest = self.data_to_send["iovs"][0]["since"]
        for (i, iov) in enumerate(self.data_to_send["iovs"]):
            if self.data_to_send["iovs"][i]["since"] <= self.data_to_send[
                    "since"] and self.data_to_send["iovs"][i][
                        "since"] > max_since_below_dest:
                max_since_below_dest = self.data_to_send["iovs"][i]["since"]

        # only select iovs that have sinces >= max_since_below_dest
        # and then shift any IOVs left to the destination since
        self.data_to_send["iovs"] = filter(
            lambda iov: iov["since"] >= max_since_below_dest,
            self.data_to_send["iovs"])
        for (i, iov) in enumerate(self.data_to_send["iovs"]):
            if self.data_to_send["iovs"][i]["since"] < self.data_to_send[
                    "since"]:
                self.data_to_send["iovs"][i]["since"] = self.data_to_send[
                    "since"]

        # modify insertion_time of iovs
        new_time = to_timestamp(datetime.now())
        for (i, iov) in enumerate(self.data_to_send["iovs"]):
            self.data_to_send["iovs"][i]["insertion_time"] = new_time
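
The docstring in Example #46 describes the IOV-shifting rule: keep the greatest since at or below the destination since, drop everything smaller, and raise the surviving sinces up to the destination. A small standalone sketch of that rule on plain dictionaries, written as an assumption to illustrate the algorithm rather than as project code:

def shift_iovs(iovs, dest_since):
    # Greatest since that does not exceed the destination since.
    max_below = max(iov["since"] for iov in iovs if iov["since"] <= dest_since)
    # Keep only IOVs from that point on, then raise any remaining low sinces.
    kept = [dict(iov) for iov in iovs if iov["since"] >= max_below]
    for iov in kept:
        if iov["since"] < dest_since:
            iov["since"] = dest_since
    return kept

# shift_iovs([{"since": 1}, {"since": 5}, {"since": 20}], 10)
# -> [{"since": 10}, {"since": 20}]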
Example #47
File: models.py Project: BetterWang/cmssw
 def to_array(self):
     return [self.name, self.time_type, self.object_type, self.synchronization, to_timestamp(self.insertion_time), self.description]
Example #48
def get_time(x):
    return to_timestamp(dt.datetime.strptime(x, '%Y-%m-%dT%H:%M:%S'))
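
A hypothetical call to Example #48's helper with an ISO-8601 string (the date value is illustrative only):

epoch = get_time('2020-01-01T00:00:00')  # parses with strptime, then converts via to_timestamp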
Example #49
	def filter_iovs_by_fcsr(self, upload_session_id):
		"""
		Ask the server for the FCSR based on the synchronization type of the source Tag.
		Then, modify the IOVs (possibly remove some) based on the FCSR we received.
		This is useful in the case that most IOVs have different payloads, and our FCSR is close to the end of the range the IOVs cover.
		"""
		self._outputter.write("Getting the First Condition Safe Run for the current sync type.")

		fcsr_data = self.get_fcsr_from_server(upload_session_id)
		fcsr = fcsr_data["fcsr"]
		fcsr_changed = fcsr_data["fcsr_changed"]
		new_sync = fcsr_data["new_sync"]

		if fcsr_changed:
			self._outputter.write("Synchronization '%s' given was changed to '%s' to match destination Tag." % (self.data_to_send["fcsr_filter"], new_sync))

		self._outputter.write("Synchronization '%s' gave FCSR %d for FCSR Filtering."\
							% (self.data_to_send["fcsr_filter"], friendly_since(self.data_to_send["inputTagData"]["time_type"], fcsr)))

		"""
		There may be cases where this assumption is not correct (that we can reassign since if fcsr > since)
		Only set since to fcsr from server if the fcsr is further along than the user is trying to upload to
		Note: this applies to run, lumi and timestamp run_types.
		"""

		# if the fcsr is above the since given by the user, we need to set the user since to the fcsr
		if fcsr > self.data_to_send["since"]:
			# check if we're uploading to offline sync - if so, then user since must be >= fcsr, so we should report an error
			if self.data_to_send["fcsr_filter"].lower() == "offline":
				self._outputter.write("If you're uploading to offline, you can't upload to a since < FCSR.\nNo upload has been processed.")
				self.exit_upload()
			self.data_to_send["since"] = fcsr

		self._outputter.write("Final FCSR after comparison with FCSR received from server is %d."\
								% friendly_since(self.data_to_send["inputTagData"]["time_type"], int(self.data_to_send["since"])))

		"""
		Post validation processing assuming destination since is now valid.

		Because we don't have an sqlite database to query (everything's in a dictionary),
		we have to go through the IOVs manually to find the greatest since that's less than
		the destination since.

		Purpose of this algorithm: move any IOV sinces that we can use up to the fcsr without leaving a hole in the Conditions coverage
		"""
		
		max_since_below_dest = self.data_to_send["iovs"][0]["since"]
		for (i, iov) in enumerate(self.data_to_send["iovs"]):
			if self.data_to_send["iovs"][i]["since"] <= self.data_to_send["since"] and self.data_to_send["iovs"][i]["since"] > max_since_below_dest:
				max_since_below_dest = self.data_to_send["iovs"][i]["since"]

		# only select iovs that have sinces >= max_since_below_dest
		# and then shift any IOVs left to the destination since
		self.data_to_send["iovs"] = [iov for iov in self.data_to_send["iovs"] if iov["since"] >= max_since_below_dest]
		for (i, iov) in enumerate(self.data_to_send["iovs"]):
			if self.data_to_send["iovs"][i]["since"] < self.data_to_send["since"]:
				self.data_to_send["iovs"][i]["since"] = self.data_to_send["since"]

		# modify insertion_time of iovs
		new_time = to_timestamp(datetime.now())
		for (i, iov) in enumerate(self.data_to_send["iovs"]):
			self.data_to_send["iovs"][i]["insertion_time"] = new_time
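
The filtering above is easier to see outside the class. A minimal stand-alone sketch of the same IOV-shifting logic, using hypothetical sinces and payload hashes, showing that the greatest since at or below the destination since is kept and shifted up while anything earlier is dropped:

# hypothetical values, purely for illustration
destination_since = 250
iovs = [
    {"since": 100, "payload_hash": "a"},
    {"since": 200, "payload_hash": "b"},  # greatest since <= destination: kept and shifted up
    {"since": 300, "payload_hash": "c"},  # already beyond the destination: untouched
]

# find the greatest since that is <= the destination since
max_since_below_dest = iovs[0]["since"]
for iov in iovs:
    if max_since_below_dest < iov["since"] <= destination_since:
        max_since_below_dest = iov["since"]

# drop IOVs below that since, then move the remaining early sinces up to the destination
iovs = [iov for iov in iovs if iov["since"] >= max_since_below_dest]
for iov in iovs:
    if iov["since"] < destination_since:
        iov["since"] = destination_since

print(iovs)  # payload "a" is dropped; "b" now starts at 250, "c" stays at 300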
Example #50
0
File: models.py Project: BetterWang/cmssw
 def to_array(self):
     return [self.name, self.release, to_timestamp(self.insertion_time), to_timestamp(self.snapshot_time), self.description]
Example #51
0
	def __init__(self, metadata_source=None, debug=False, verbose=False, testing=False, server="https://cms-conddb-dev.cern.ch/cmsDbCondUpload/", **kwargs):
		"""
		Upload constructor:
		Given an SQLite file and a metadata source, reads them into a dictionary ready to be encoded and uploaded.

		Note: kwargs is used to capture stray arguments - arguments that do not match keywords will not be used.

		Note: default value of service_url should be changed for production.
		"""
		# set private variables
		self._debug = debug
		self._verbose = verbose
		self._testing = testing
		# initialise server-side log data as empty string - will be replaced when we get a response back from the server
		self._log_data = ""
		self._SERVICE_URL = server
		self.upload_session_id = None

		# set up client-side log file
		self.upload_log_file_name = "upload_logs/upload_log_%d" % new_log_file_id()
		self._handle = open(self.upload_log_file_name, "a")

		# set up client-side logging object
		self._outputter = output(verbose=verbose, log_handle=self._handle)
		self._outputter.write("Using server instance at '%s'." % self._SERVICE_URL)

		# expect a CondDBFW data_source object for metadata_source
		if metadata_source == None:
			# no upload metadata has been given - we cannot continue with the upload
			self.exit_upload("A source of metadata must be given so CondDBFW knows how to upload conditions.")
		else:
			# set up global metadata source variable
			self.metadata_source = metadata_source.data()

		# check for the destination tag
		# this is required whatever type of upload we're performing
		if self.metadata_source.get("destinationTags") == None:
			self.exit_upload("No destination Tag was given.")
		else:
			if isinstance(self.metadata_source.get("destinationTags"), dict) and self.metadata_source.get("destinationTags").keys()[0] == None:
				self.exit_upload("No destination Tag was given.")

		# make sure a destination database was given
		if self.metadata_source.get("destinationDatabase") == None:
			self.exit_upload("No destination database was given.")

		# get Conditions metadata
		if self.metadata_source.get("sourceDB") == None and self.metadata_source.get("hashToUse") == None:
			"""
			If we have neither an SQLite file nor the command line data
			"""
			self.exit_upload("You must give either an SQLite database file, or the necessary command line arguments to replace one."\
							+ "\nSee --help for command line argument information.")
		elif self.metadata_source.get("sourceDB") != None:
			"""
			We've been given an SQLite file, so try to extract Conditions Metadata based on that and the Upload Metadata in metadata_source
			We now extract the Tag and IOV data from SQLite.  It is added to the dictionary for sending over HTTPS later.
			"""

			# make sure we have an input tag to look for in the source db
			self.input_tag = metadata_source.data().get("inputTag")
			if self.input_tag == None:
				self.exit_upload("No input Tag name was given.")

			# set empty dictionary to contain Tag and IOV data from SQLite
			result_dictionary = {}
			self.sqlite_file_name = self.metadata_source["sourceDB"]
			if not(os.path.isfile(self.sqlite_file_name)):
				self.exit_upload("SQLite file '%s' given doesn't exist." % self.sqlite_file_name)
			sqlite_con = querying.connect("sqlite://%s" % os.path.abspath(self.sqlite_file_name))

			self._outputter.write("Getting Tag and IOVs from SQLite database.")

			# query for Tag, check for existence, then convert to dictionary
			tag = sqlite_con.tag(name=self.input_tag)
			if tag == None:
				self.exit_upload("The source Tag '%s' you gave was not found in the SQLite file." % self.input_tag)
			tag = tag.as_dicts(convert_timestamps=True)

			# query for IOVs, check for existence, then convert to dictionaries
			iovs = sqlite_con.iov(tag_name=self.input_tag)
			if iovs == None:
				self.exit_upload("No IOVs found in the SQLite file given for Tag '%s'." % self.input_tag)
			iovs = iovs.as_dicts(convert_timestamps=True)
			iovs = [iovs] if not isinstance(iovs, list) else iovs

			"""
			Finally, get the list of all Payload hashes of the IOVs,
			then compute the list of hashes for which there is no local Payload;
			this is used later to decide whether we can continue the upload if a Payload is not found on the server.
			"""
			iovs_for_hashes = sqlite_con.iov(tag_name=self.input_tag)
			if iovs_for_hashes.__class__ == data_sources.json_list:
				hashes_of_iovs = iovs_for_hashes.get_members("payload_hash").data()
			else:
				hashes_of_iovs = [iovs_for_hashes.payload_hash]
			self.hashes_with_no_local_payload = [payload_hash for payload_hash in hashes_of_iovs if sqlite_con.payload(hash=payload_hash) == None]

			# close session open on SQLite database file
			sqlite_con.close_session()

		elif metadata_source.data().get("hashToUse") != None:
			"""
			Assume we've been given metadata on the command line (since no SQLite file is given, and we have command line arguments).
			We now use the Tag and IOV data from the command line.  It is added to the dictionary for sending over HTTPS later.
			"""

			# set empty dictionary to contain Tag and IOV data from command line
			result_dictionary = {}

			now = to_timestamp(datetime.now())
			# tag dictionary will be taken from the server
			# this does not require any authentication
			tag = self.get_tag_dictionary()
			self.check_response_for_error_key(tag)
			iovs = [{"tag_name" : self.metadata_source["destinationTag"], "since" : self.metadata_source["since"], "payload_hash" : self.metadata_source["hashToUse"],\
					"insertion_time" : now}]

			# hashToUse cannot be stored locally (no sqlite file is given), so register it as not found
			self.hashes_with_no_local_payload = [self.metadata_source["hashToUse"]]

			# Note: normal optimisations will still take place - since the hash checking stage can tell if hashToUse does not exist on the server side

		# if the source Tag is run-based, convert sinces to lumi-based sinces with lumi-section = 0
		if tag["time_type"] == "Run":
			for (i, iov) in enumerate(iovs):
				iovs[i]["since"] = iovs[i]["since"] << 32

		result_dictionary = {"inputTagData" : tag, "iovs" : iovs}

		# add command line arguments to dictionary
		# remembering that metadata_source is a json_dict object
		result_dictionary.update(metadata_source.data())

		# store in instance variable
		self.data_to_send = result_dictionary

		# if the since doesn't exist, take the first since from the list of IOVs
		if result_dictionary.get("since") == None:
			result_dictionary["since"] = sorted(iovs, key=lambda iov : iov["since"])[0]["since"]
		elif self.data_to_send["inputTagData"]["time_type"] == "Run":
			# Tag time_type says IOVs use Runs for sinces, so we convert to Lumi-based for uniform processing
			self.data_to_send["since"] = self.data_to_send["since"] << 32

		"""
		TODO - Settle on a single destination tag format.
		"""
		# look for deprecated metadata entries - give warnings
		# Note - we only really support this format
		try:
			if isinstance(result_dictionary["destinationTags"], dict):
				self._outputter.write("WARNING: Multiple destination tags in a single metadata source is deprecated.")
		except Exception as e:
			self._outputter.write("ERROR: %s" % str(e))
Example #52
0
File: models.py Project: BetterWang/cmssw
 def to_array(self):
     return [self.queue, self.tag, self.record, self.label, status_full_name(self.status), to_timestamp(self.time_submitted), to_timestamp(self.last_edited)]
Example #53
0
 def insert_missed_block(self, expected_block_time, functionary):
     self.conn.execute("INSERT INTO missing_blocks VALUES (?, ?)",
                       (to_timestamp(expected_block_time), functionary))
Example #54
0
 def insert_downtime(self, resume_time, downtime):
     self.conn.execute("INSERT INTO outages VALUES (?, ?)",
                       (to_timestamp(resume_time), downtime))
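
Examples #53 and #54 are thin wrappers around parameterized sqlite3 inserts. A minimal sketch of how such methods could be wired up, with a hypothetical schema, class name and to_timestamp stand-in (none of which come from the original project):

import sqlite3
import calendar
from datetime import datetime

def to_timestamp(obj):
    # hypothetical stand-in, as in the sketch under Example #48
    return calendar.timegm(obj.timetuple())

class BlockLog(object):  # hypothetical wrapper around the two inserts above
    def __init__(self, path=":memory:"):
        self.conn = sqlite3.connect(path)
        # hypothetical schema matching the shape of the inserts
        self.conn.execute("CREATE TABLE IF NOT EXISTS missing_blocks (expected_time INTEGER, functionary TEXT)")
        self.conn.execute("CREATE TABLE IF NOT EXISTS outages (resume_time INTEGER, downtime INTEGER)")

    def insert_missed_block(self, expected_block_time, functionary):
        self.conn.execute("INSERT INTO missing_blocks VALUES (?, ?)",
                          (to_timestamp(expected_block_time), functionary))

    def insert_downtime(self, resume_time, downtime):
        self.conn.execute("INSERT INTO outages VALUES (?, ?)",
                          (to_timestamp(resume_time), downtime))

log = BlockLog()
log.insert_missed_block(datetime(2016, 1, 1, 12, 0), "signer-3")
log.insert_downtime(datetime(2016, 1, 1, 13, 0), 3600)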