Example 1
	def backup(self, full = False):
		transfer = self.transfer(newshare = self)
		#print(repr(self.newbackup.predecessor))
		#print(repr(self.reference))
		hostconfig = self.host.config

		info = dict(
			failed = False,
			name = self.name,
			path = self.path,
			mountpoint = self.mountpoint,
		)

		with hostconfig.setenv(self.env):
			self.pre_command(fruitbak = self.fruitbak, host = self.host, backup = self.newbackup, newshare = self)

			info['startTime'] = time_ns()

			with self.hardhat_maker:
				transfer.transfer()

			info['endTime'] = time_ns()

			self.post_command(fruitbak = self.fruitbak, host = self.host, backup = self.newbackup, newshare = self)

		with open('info.json', 'w', opener = self.sharedir_fd.opener) as fp:
			dump_json(info, fp)

		return info
Example 2
def get_moment_complete():
    """Return local date and time as day_count, local time as day fraction, and,
    if possible, distance to UTC as fraction of a day."""
    try:
        moment_ns = time.time_ns() # time in ns from epoch; note epoch is platform dependent
    except AttributeError:
        moment_ns = int(time.time() * 1000000000)  # time() returns a float of seconds
    # for the moment we are using time module's functions to get localtime
    #TODO: check if possible to implement something independent from time module, see e.g. tzlocal
    seconds, nanoseconds = divmod(moment_ns, 1000000000)
    moment = time.localtime(seconds)
    year = moment.tm_year
    days_before_year = (year - 1) * 365 + (year - 1) // 4 - (year - 1) // 100 + (year - 1) // 400
    day_count = days_before_year + moment.tm_yday
    day_frac = Fraction(moment.tm_hour, 24) + Fraction(moment.tm_min, 1440) + Fraction(moment.tm_sec, 86400) + Fraction(nanoseconds, 86400000000000)
    to_utc = -Fraction(moment.tm_gmtoff, 86400)
    return day_count, day_frac, to_utc
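A quick usage sketch for the function above (my own addition; it assumes the module-level import time and from fractions import Fraction that the snippet relies on):

from fractions import Fraction  # needed by get_moment_complete
import time                     # needed by get_moment_complete

day_count, day_frac, to_utc = get_moment_complete()
print(day_count)            # proleptic Gregorian day number (0001-01-01 is day 1)
print(float(day_frac))      # local time of day as a fraction of a day, 0 <= x < 1
print(float(to_utc) * 24)   # hours to add to local time to reach UTC, e.g. -2.0 for UTC+2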
Example 3
    def test_time_ns_type(self):
        def check_ns(sec, ns):
            self.assertIsInstance(ns, int)

            sec_ns = int(sec * 1e9)
            # tolerate a difference of 50 ms
            self.assertLess((sec_ns - ns), 50 * 10 ** 6, (sec, ns))

        check_ns(time.time(),
                 time.time_ns())
        check_ns(time.monotonic(),
                 time.monotonic_ns())
        check_ns(time.perf_counter(),
                 time.perf_counter_ns())
        check_ns(time.process_time(),
                 time.process_time_ns())

        if hasattr(time, 'clock_gettime'):
            check_ns(time.clock_gettime(time.CLOCK_REALTIME),
                     time.clock_gettime_ns(time.CLOCK_REALTIME))
Example 4
def _Pool_initialize_worker(augseq, seed_start):
    if seed_start is None:
        # pylint falsely thinks in older versions that multiprocessing.current_process() was not
        # callable, see https://github.com/PyCQA/pylint/issues/1699
        # pylint: disable=not-callable
        process_name = multiprocessing.current_process().name
        # pylint: enable=not-callable

        # time_ns() exists only in 3.7+
        if sys.version_info[0] == 3 and sys.version_info[1] >= 7:
            seed_offset = time.time_ns()
        else:
            seed_offset = int(time.time() * 10**6) % 10**6
        seed = hash(process_name) + seed_offset
        seed_global = ia.SEED_MIN_VALUE + (seed - 10**9) % (ia.SEED_MAX_VALUE - ia.SEED_MIN_VALUE)
        seed_local = ia.SEED_MIN_VALUE + seed % (ia.SEED_MAX_VALUE - ia.SEED_MIN_VALUE)
        ia.seed(seed_global)
        augseq.reseed(seed_local)
    Pool._WORKER_SEED_START = seed_start
    Pool._WORKER_AUGSEQ = augseq
    Pool._WORKER_AUGSEQ.localize_random_state_()  # not sure if really necessary, but won't hurt either
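The branch above only derives a per-worker seed offset from the wall clock (nanoseconds on Python 3.7+, microseconds otherwise). A condensed standalone sketch of the same idea, with a function name and seed range of my own rather than imgaug's:

import multiprocessing
import sys
import time

def derive_worker_seed() -> int:
    # combine the worker's process name with a time-based offset, as in the example above
    name = multiprocessing.current_process().name
    if sys.version_info >= (3, 7):
        offset = time.time_ns()
    else:
        offset = int(time.time() * 10**6)
    return (hash(name) + offset) % (2**31 - 1)  # clamp to an arbitrary 31-bit range

print(derive_worker_seed())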
Example 5
	def age_months(self):
		start_time = self.start_time
		start_timestruct = localtime(start_time // 1000000000)
		start_yearmonth = start_timestruct.tm_year * 12 + start_timestruct.tm_mon
		beginning_of_start_month = int(mktime((
			start_timestruct.tm_year,
			start_timestruct.tm_mon,
			1, 0, 0, 0, 0, 0, -1,
		)) * 1000000000)
		ending_of_start_month = int(mktime((
			start_timestruct.tm_year,
			start_timestruct.tm_mon + 1,
			1, 0, 0, 0, 0, 0, -1,
		)) * 1000000000)
		start_month_ratio = ((start_time - beginning_of_start_month)
			/ (ending_of_start_month - beginning_of_start_month))

		current_time = time_ns()
		current_timestruct = localtime(current_time // 1000000000)
		current_yearmonth = current_timestruct.tm_year * 12 + current_timestruct.tm_mon
		if current_yearmonth == start_yearmonth:
			beginning_of_current_month = beginning_of_start_month
			ending_of_current_month = ending_of_start_month
		else:
			beginning_of_current_month = int(mktime((
				current_timestruct.tm_year,
				current_timestruct.tm_mon,
				1, 0, 0, 0, 0, 0, -1,
			)) * 1000000000)
			ending_of_current_month = int(mktime((
				current_timestruct.tm_year,
				current_timestruct.tm_mon + 1,
				1, 0, 0, 0, 0, 0, -1,
			)) * 1000000000)
		current_month_ratio = ((current_time - beginning_of_current_month)
			/ (ending_of_current_month - beginning_of_current_month))

		return (current_yearmonth - start_yearmonth
			+ current_month_ratio - start_month_ratio)
Example 6
def uuid1(node=None, clock_seq=None):
    """Generate a UUID from a host ID, sequence number, and the current time.
    If 'node' is not given, getnode() is used to obtain the hardware
    address.  If 'clock_seq' is given, it is used as the sequence number;
    otherwise a random 14-bit sequence number is chosen."""

    # When the system provides a version-1 UUID generator, use it (but don't
    # use UuidCreate here because its UUIDs don't conform to RFC 4122).
    _load_system_functions()
    if _generate_time_safe is not None and node is clock_seq is None:
        uuid_time, safely_generated = _generate_time_safe()
        try:
            is_safe = SafeUUID(safely_generated)
        except ValueError:
            is_safe = SafeUUID.unknown
        return UUID(bytes=uuid_time, is_safe=is_safe)

    global _last_timestamp
    import time
    nanoseconds = time.time_ns()
    # 0x01b21dd213814000 is the number of 100-ns intervals between the
    # UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.
    timestamp = nanoseconds // 100 + 0x01b21dd213814000
    if _last_timestamp is not None and timestamp <= _last_timestamp:
        timestamp = _last_timestamp + 1
    _last_timestamp = timestamp
    if clock_seq is None:
        import random
        clock_seq = random.getrandbits(14) # instead of stable storage
    time_low = timestamp & 0xffffffff
    time_mid = (timestamp >> 32) & 0xffff
    time_hi_version = (timestamp >> 48) & 0x0fff
    clock_seq_low = clock_seq & 0xff
    clock_seq_hi_variant = (clock_seq >> 8) & 0x3f
    if node is None:
        node = getnode()
    return UUID(fields=(time_low, time_mid, time_hi_version,
                        clock_seq_hi_variant, clock_seq_low, node), version=1)
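A small side check, my own and not part of uuid.py, that the constant used above really is the number of 100-ns intervals between the UUID epoch and the Unix epoch:

from datetime import date

days = (date(1970, 1, 1) - date(1582, 10, 15)).days    # 141427 days
assert days * 86400 * 10**7 == 0x01b21dd213814000      # 122192928000000000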
Example 7
	def age_days(self):
		return (time_ns() - self.start_time) / 86400000000000
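The divisor above, like those in the related age_* helpers, is simply nanoseconds per unit; a short reference sketch (constant names are mine):

NS_PER_SECOND = 10**9
NS_PER_MINUTE = 60 * NS_PER_SECOND   # 60000000000
NS_PER_HOUR = 60 * NS_PER_MINUTE     # 3600000000000
NS_PER_DAY = 24 * NS_PER_HOUR        # 86400000000000
NS_PER_WEEK = 7 * NS_PER_DAY         # 604800000000000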
Example 8
     if len(termination_queue):
         _ = termination_queue.get()
         logger.info('worker (%s) terminating on request',
                     node_uid)
         sys.exit(0)
 except queue.Empty:
     pass
 try:
     if len(submit_queue):
         with transaction_context(environment, write=True):
             task = submit_queue.get()
             if task._status == 'submitted':
                 task._status = 'running'
                 task._pid = os.getpid()
                 task._node_uid = node_uid
                 task._start_timestamp = time.time_ns()
             else:
                 continue
     else:
         break
 except queue.Empty:
     break
 try:
     result = error = None
     setenv(constants.SELF_ENVNAME,
            pickle.dumps(task, 0).decode())
     logger.info('start task %s on pid %i', task.asyncable.path,
                 os.getpid())
     result = task.asyncable.invoke(args=task.args,
                                    kwargs=task.kwargs)
 except Exception as exc:
Example 9
	def age_minutes(self):
		return (time_ns() - self.start_time) / 60000000000
Example 10
import random
import time


def generate_timestamp():
    current_ns = time.time_ns()
    one_day_ns = 86400000000000
    return random.randint(current_ns - one_day_ns, current_ns)
    # 1603044805017
Example 11
            "ssid_name": "ssid_wpa_eap_2g",
            "appliedRadios": ["2G"]
        }, {
            "ssid_name": "ssid_wpa_eap_5g",
            "appliedRadios": ["5G"]
        }]
    },
    "rf": {},
    "radius": True
}
for sec_modes in setup_params_enterprise['ssid_modes'].keys():
    for i in range(len(setup_params_enterprise['ssid_modes'][sec_modes])):
        N = 3
        rand_string = (''.join(
            random.choices(string.ascii_uppercase + string.digits,
                           k=N))) + str(int(time.time_ns()) % 10000)
        setup_params_enterprise['ssid_modes'][sec_modes][i][
            'ssid_name'] = setup_params_enterprise['ssid_modes'][sec_modes][i][
                'ssid_name'] + "_" + rand_string


@allure.suite(suite_name="interop Regression")
@allure.sub_suite(sub_suite_name="Bridge Mode EAP Client ReConnect : Suite-A")
@pytest.mark.suiteA
@pytest.mark.parametrize('setup_profiles', [setup_params_enterprise],
                         indirect=True,
                         scope="class")
@pytest.mark.usefixtures("setup_profiles")
class TestToggleAirplaneModeBridgeModeEnterpriseTTLSSuiteA(object):
    """ SuiteA Enterprise Test Cases
        pytest -m "client_reconnect and bridge and enterprise and ttls and interop and suiteA"
Example 12
    start_time = 0
    t_start = time.time()

    signal.signal(signal.SIGINT, event_generator._stop_event_gen)

    while True:
        image_request = airsim.ImageRequest("0", airsim.ImageType.Scene, False,
                                            False)

        response = event_generator.client.simGetImages(
            [event_generator.image_request])
        while response[0].height == 0 or response[0].width == 0:
            response = event_generator.client.simGetImages(
                [event_generator.image_request])

        ts = time.time_ns()

        if event_generator.init:
            event_generator.start_ts = ts
            event_generator.init = False

        img = np.reshape(
            np.frombuffer(response[0].image_data_uint8, dtype=np.uint8),
            event_generator.rgb_image_shape,
        )

        img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY).astype(np.float32)
        # Add small number to avoid issues with log(I)
        img = cv2.add(img, 0.001)

        ts = time.time_ns()
Example 13
def lambda_handler(event, context):
    host = 'search-comse6998-xyiaghg4awnumubfxszrh56nlu.us-east-1.es.amazonaws.com'
    region = 'us-east-1'
    service = 'es'
    awsauth = AWS4Auth('aws_access_key_id', 'aws_secret_access_key', region,
                       service)

    es = Elasticsearch(hosts=[{
        'host': host,
        'port': 443
    }],
                       http_auth=awsauth,
                       use_ssl=True,
                       verify_certs=True,
                       connection_class=RequestsHttpConnection)

    queryStringParameters = event["queryStringParameters"]
    # userid = queryStringParameters['userid'].strip()

    token = queryStringParameters['token'].strip()
    # Invoke lambda
    invokeLambda = boto3.client('lambda',
                                region_name='us-east-1',
                                aws_access_key_id='aws_access_key_id',
                                aws_secret_access_key='aws_secret_access_key')

    payload_event = {"body": {"token": token}}

    invoke_response = invokeLambda.invoke(
        FunctionName='comse6998-project-token-verification',
        InvocationType='RequestResponse',
        Payload=json.dumps(payload_event))

    body = json.loads(invoke_response["Payload"].read())
    # Convert string to dict
    body = json.loads(body['body'])

    userid = ''
    if 'username' in body:
        userid = body['username']
    else:
        res = {
            'statusCode': 200,
            'headers': {
                'Access-Control-Allow-Headers': 'Content-Type',
                'Access-Control-Allow-Origin': '*',
                'Access-Control-Allow-Methods': 'OPTIONS,POST,PUT,GET'
            },
            'body': json.dumps({'invalid': True})
        }
        return res

    recipeid = queryStringParameters['recipeid'].strip()

    es_id = userid + '_' + recipeid
    payload = {"query": {"terms": {"_id": [es_id]}}}

    frequency = 1
    if es.indices.exists(index='user_recipes'):
        res = es.search(index='user_recipes', body=payload)
        if res['hits']['hits']:
            frequency = res['hits']['hits'][0]['_source']['frequency'] + 1

    history_recipe = {}
    if event['body']:
        history_recipe = json.loads(event['body'])
        for k, v in history_recipe.items():
            if type(v) is decimal.Decimal:
                history_recipe[k] = int(v)

    json_object = {
        'userid': userid,
        'recipeid': recipeid,
        'frequency': frequency,
        'timestamp': time.time_ns(),
        'history_recipe': history_recipe
    }

    document = json_object
    es.index(index="user_recipes",
             doc_type="user_recipe",
             id=es_id,
             body=document)

    # Update current_recipe in DB
    ACCESS_KEY = 'ACCESS_KEY'
    SECRET_KEY = 'SECRET_KEY'
    region_name = 'us-east-1'

    dynamodb = boto3.resource('dynamodb',
                              aws_access_key_id=ACCESS_KEY,
                              aws_secret_access_key=SECRET_KEY,
                              region_name=region_name)
    table = dynamodb.Table('users')
    response = table.query(KeyConditionExpression=Key('userid').eq(userid))

    item = response['Items'][0]
    item['current_recipe'].append(history_recipe)
    table.put_item(Item=item)

    # Send message to SQS queue
    sqs = boto3.client('sqs',
                       region_name=region,
                       aws_access_key_id='aws_access_key_id',
                       aws_secret_access_key='aws_secret_access_key')

    queue_url = 'https://sqs.us-east-1.amazonaws.com/640615917264/comse6009-project-q.fifo'
    response = sqs.send_message(
        QueueUrl=queue_url,
        MessageGroupId='Group1',
        MessageAttributes={
            'userid': {
                'DataType': 'String',
                'StringValue': userid
            },
            'recipeid': {
                'DataType': 'String',
                'StringValue': recipeid
            },
            'calories': {
                'DataType': 'String',
                'StringValue': str(history_recipe['calories'])
            },
            'carbs': {
                'DataType': 'String',
                'StringValue': str(history_recipe['carbs'])
            },
            'fat': {
                'DataType': 'String',
                'StringValue': str(history_recipe['fat'])
            },
            'fiber': {
                'DataType': 'String',
                'StringValue': str(history_recipe['fiber'])
            },
            'image': {
                'DataType': 'String',
                'StringValue': str(history_recipe['image'])
            },
            'protein': {
                'DataType': 'String',
                'StringValue': str(history_recipe['protein'])
            },
            # 'sodium': {
            #     'DataType': 'String',
            #     'StringValue': str(history_recipe['sodium'])
            # },
            'sugar': {
                'DataType': 'String',
                'StringValue': str(history_recipe['sugar'])
            },
            'title': {
                'DataType': 'String',
                'StringValue': str(history_recipe['title'])
                # },
                # 'vc': {
                #     'DataType': 'String',
                #     'StringValue': str(history_recipe['vc'])
            }
        },
        MessageBody=(userid + '|' + str(time.time_ns())))

    print(response['MessageId'])

    return {
        'statusCode': 200,
        'headers': {
            'Access-Control-Allow-Headers': 'Content-Type',
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Methods': 'OPTIONS,POST,PUT,GET'
        },
        'body': json.dumps(json_object)
    }
Example 14
 def validate_timestamp(timestamp: Timestamp) -> None:
     assert ts.monotonic_ns <= timestamp.monotonic_ns <= time.monotonic_ns()
     assert ts.system_ns <= timestamp.system_ns <= time.time_ns()
Example 15

# https://www.programiz.com/dsa/radix-sort
def radix_sort(array):
    # Get maximum element
    max_element = max(array)

    # Apply counting sort to sort elements based on place value.
    place = 1
    while max_element // place > 0:
        countingSort(array, place)
        place *= 10


array = get_array()
t0 = time.time_ns()
bubble_sort(array)
t1 = time.time_ns()
total = (t1 - t0) / 1e6  # convert time from nanoseconds into milliseconds
print("Bubble Sort Time: ", total)

array1 = get_array()
t0 = time.time_ns()
quick_sort(array1, 0, len(array1) - 1)
t1 = time.time_ns()
total = (t1 - t0) / 1e6  # convert time from nanoseconds into milliseconds
print("Quick Sort Time: ", total)

array2 = get_array()
t0 = time.time_ns()
radix_sort(array2)
Example 16
import time

from backend.blockchain.blockchain import Blockchain
from backend.config import SECONDS

blockchain = Blockchain()
times = []

for i in range(1000):
    start_time = time.time_ns()
    blockchain.add_block(i)
    end_time = time.time_ns()
    time_to_mine = (end_time - start_time) / SECONDS
    times.append(time_to_mine)

    average_time = sum(times) / len(times)
    print(f'New block difficulty:{blockchain.chain[-1].difficulty}')
    print(f'Time to mine new block:{time_to_mine}s')
    print(f'Average time to add blocks:{average_time}s\n')
Example 17
array_sizes = 2**np.arange(9)
for size in array_sizes:

    print("size {}".format(size**2 * 8))
    t_set = []
    t_get = []
    t_set_complete = []
    t_get_complete = []

    key = "{0:015b}".format(1)
    for i in range(N):

        # set
        x = np.random.uniform(0, 1, size=(size, size))

        t_start = time.time_ns()  #r.time()
        r.set(key, x.tobytes())
        t_end = time.time_ns()  #r.time()

        job_time = (t_end - t_start) * 1e-9  #(t_end[0] + t_end[1]*1e-6)-(t_start[0] + t_start[1]*1e-6)
        t_set.append(job_time)
        #t_set_complete.append(t_end[0] + t_end[1]*1e-6)

        # get
        t_start = time.time_ns()  #r.time()
        r.get(key)
        t_end = time.time_ns()  #r.time()

        job_time = (t_end - t_start) * 1e-9  #(t_end[0] + t_end[1]*1e-6)-(t_start[0] + t_start[1]*1e-6)
Example 18
 def reset(self) -> None:
     if self._start_time == 0:
         self._start_time = time.time_ns()
Example 19
 def current_value(self) -> int:
     current_time = time.time_ns()
     return (current_time - self._start_time) // 1_000_000_000
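Examples 18 and 19 read like two methods of the same elapsed-seconds counter; here is a minimal self-contained sketch of that idea (the class name and wiring are mine):

import time

class ElapsedSeconds:
    def __init__(self) -> None:
        self._start_time = 0

    def reset(self) -> None:
        if self._start_time == 0:
            self._start_time = time.time_ns()

    def current_value(self) -> int:
        return (time.time_ns() - self._start_time) // 1_000_000_000

timer = ElapsedSeconds()
timer.reset()
print(timer.current_value())  # whole seconds elapsed since reset()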
Example 20
 def filter(self, record):
     setattr(record, 'timestamp_nanos', time.time_ns())
     return True
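A usage sketch for a filter like the one above (the wiring and names below are mine): attaching it to a logger makes timestamp_nanos available to the formatter.

import logging
import time

class NanosFilter(logging.Filter):
    def filter(self, record):
        setattr(record, 'timestamp_nanos', time.time_ns())
        return True

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(timestamp_nanos)d %(levelname)s %(message)s'))
logger = logging.getLogger('demo')
logger.addHandler(handler)
logger.addFilter(NanosFilter())
logger.warning('hello')  # e.g. "1717000000000000000 WARNING hello"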
Example 21
    def start_reID(self, img_array):
        t_batch_start = time.time_ns()
        t_singleimg_start = time.time_ns()
        feature_array = self.feature_extractor.extract_feature_numpy(img_array)
        t_singleimg_end = time.time_ns()
        logging.debug("Batch feature extraction took" +
                      str((t_singleimg_end - t_singleimg_start) / 1000000000) +
                      "seconds")
        #print(feature)
        count = 0
        for feature in feature_array:
            feature = [feature]
            #print(feature)
            smallest_index, smallest_distance = self.ringbuffer.nearestneighbors(
                feature)
            #print(smallest_distance)
            logging.debug("Smallest distance: " + str(smallest_distance))
            #print(len(self.ringbuffer.ringbuffer))
            logging.debug("Length of ringbuffer: " +
                          str(len(self.ringbuffer.ringbuffer)))
            if self.ringbuffer.ringbuffer:
                logging.debug("Length of ringbuffer[0]" +
                              str(len(self.ringbuffer.ringbuffer[0])))
            if smallest_distance <= THRESHHOLD_RE_ID:
                #print("Erkannt")
                last_seen, person_id = self.ringbuffer.lastseen(smallest_index)
                #self.sayhello.sayagain_async(last_seen)
                self.ringbuffer.addnewfeature(smallest_index, feature)
                img_old = self.ringbuffer.getimage(smallest_index)
                if person_id != self.last_person:
                    #print("Hallo ich habe sie das letzte mal", last_seen, "gesehen")
                    self.last_person = person_id
                    self.speaking_queue.append(last_seen)
                elif (time.time() - self.last_seen_person) > 5:
                    #print("Hallo ich habe sie das letzte mal", last_seen, "gesehen")
                    self.last_person = person_id
                    self.speaking_queue.append(last_seen)
                #else:
                #print(time.time() - self.last_seen_person)
                #self.last_person = person_id
                self.update(img_old, person_id)
                self.last_seen_person = time.time()

            elif smallest_distance >= THRESHHOLD_NEW_ID:
                self.ringbuffer.addnewperson(feature,
                                             np.array(img_array[count]))
                #print("Herzlich Willkommen!")
                self.speaking_queue.append(1)
                #self.sayhello.sayhello_async()

            t_batch_end = time.time_ns()
            t_avarage = (t_batch_end - t_batch_start) / 1000000000
            t_time_all = self.avarage_batch_time * self.batches_processed + t_avarage
            self.batches_processed += 1
            self.avarage_batch_time = t_time_all / self.batches_processed
            print("batch took", t_avarage, "seconds")
            print(
                "Avarage (batch):",
                self.avarage_batch_time,
            )
            count += 1
Example 22
 def __init__(self, cjxa_loc, dictate: str):
     self.id = str(time.time_ns())
     self.lines = self.unpack_cjxa(cjxa_loc, dictate)
Example 23
import threading
import time

# Part 1: the standard library threading module
# ------------- multithreading -----------------------

def funcA():
    for _ in range(1000000):
        pass

def funcB():
    for _ in range(1000000):
        pass

# Way 1 to create threads: build threading.Thread objects and pass the target (plus args/kwargs)
t1 = threading.Thread(group=None, target=funcA, name='funcA-Thread', args=(), kwargs={}, daemon=None)
t2 = threading.Thread(group=None, target=funcB, name='funcB-Thread', args=(), kwargs={}, daemon=None)

s_time = time.time_ns()
t1.start()  # invokes the object's run()
t2.start()
print('t1 is alive? %s' % t1.is_alive())
#t1.join()  # or not
print('t1 is alive? %s' % t1.is_alive())

print('t2 is alive? %s' % t2.is_alive())
#t2.join()
print('t1 is alive? %s' % t1.is_alive())
print('t2 is alive? %s' % t2.is_alive())
e_time = time.time_ns()
print('consumed %d' % (e_time - s_time))
# other attributes
print(t1.name)
print(t1.ident)
Example 24
def bench(n_times, sampler):
    for i in range(n_times):
        ma.init_random_parameters(seed=1234, sigma=0.01)
        samples = sampler.generate_samples(n_samples)
    return samples


def j_bench(n_times, sampler):
    for i in range(n_times):
        j_ma.init_random_parameters(seed=1234, sigma=0.01)
        samples = sampler.generate_samples(n_samples)
        samples.block_until_ready()
    return samples


samples = j_bench(1, j_sa)
samples.block_until_ready()
t0 = time.time_ns()
samples = j_bench(300, j_sa)
print("Jax sampler (dtype " + str(dtype) + ")")
tf = time.time_ns()
print("time (s) ", (tf - t0) / 1.0e9)

samples = bench(1, sa)
t0 = time.time_ns()
samples = bench(300, sa)
print("Numpy sampler (dtype " + str(dtype) + ")")
tf = time.time_ns()
print("time (s) ", (tf - t0) / 1.0e9)
Example 25
def run():
    # Command line arguments
    parser = argparse.ArgumentParser(description='Load from Json from Pub/Sub into BigQuery')
    parser.add_argument('--project',required=True, help='Specify Google Cloud project')
    parser.add_argument('--region', required=True, help='Specify Google Cloud region')
    parser.add_argument('--staging_location', required=True, help='Specify Cloud Storage bucket for staging')
    parser.add_argument('--temp_location', required=True, help='Specify Cloud Storage bucket for temp')
    parser.add_argument('--runner', required=True, help='Specify Apache Beam Runner')
    parser.add_argument('--input_topic', required=True, help='Input Pub/Sub Topic')
    parser.add_argument('--agg_table_name', required=True, help='BigQuery table name for aggregate results')
    parser.add_argument('--raw_table_name', required=True, help='BigQuery table name for raw inputs')
    parser.add_argument('--window_duration', required=True, help='Window duration')

    opts = parser.parse_args()

    # Setting up the Beam pipeline options
    options = PipelineOptions(save_main_session=True, streaming=True)
    options.view_as(GoogleCloudOptions).project = opts.project
    options.view_as(GoogleCloudOptions).region = opts.region
    options.view_as(GoogleCloudOptions).staging_location = opts.staging_location
    options.view_as(GoogleCloudOptions).temp_location = opts.temp_location
    options.view_as(GoogleCloudOptions).job_name = '{0}{1}'.format('streaming-minute-traffic-pipeline-',time.time_ns())
    options.view_as(StandardOptions).runner = opts.runner

    input_topic = opts.input_topic
    raw_table_name = opts.raw_table_name
    agg_table_name = opts.agg_table_name
    window_duration = opts.window_duration

    # Table schema for BigQuery
    agg_table_schema = {
        "fields": [
            {
                "name": "page_views",
                "type": "INTEGER"
            },
            {
                "name": "timestamp",
                "type": "STRING"
            },

        ]
    }

    raw_table_schema = {
        "fields": [
            {
                "name": "ip",
                "type": "STRING"
            },
            {
                "name": "user_id",
                "type": "STRING"
            },
            {
                "name": "user_agent",
                "type": "STRING"
            },
            {
                "name": "lat",
                "type": "FLOAT",
                "mode": "NULLABLE"
            },
            {
                "name": "lng",
                "type": "FLOAT",
                "mode": "NULLABLE"
            },
            {
                "name": "event_timestamp",
                "type": "STRING"
            },
            {
                "name": "processing_timestamp",
                "type": "STRING"
            },
            {
                "name": "http_request",
                "type": "STRING"
            },
            {
                "name": "http_response",
                "type": "INTEGER"
            },
            {
                "name": "num_bytes",
                "type": "INTEGER"
            }
        ]
    }

    # Create the pipeline
    p = beam.Pipeline(options=options)



    parsed_msgs = (p | 'ReadFromPubSub' >> beam.io.ReadFromPubSub(input_topic)
                     | 'ParseJson' >> beam.Map(parse_json).with_output_types(CommonLog))

    (parsed_msgs
        | "AddProcessingTimestamp" >> beam.Map(add_processing_timestamp)
        | 'WriteRawToBQ' >> beam.io.WriteToBigQuery(
            raw_table_name,
            schema=raw_table_schema,
            create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
            write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND
            )
        )

    (parsed_msgs
        | "WindowByMinute" >> beam.WindowInto(beam.window.FixedWindows(60))
        | "CountPerMinute" >> beam.CombineGlobally(CountCombineFn()).without_defaults()
        | "AddWindowTimestamp" >> beam.ParDo(GetTimestampFn())
        | 'WriteAggToBQ' >> beam.io.WriteToBigQuery(
            agg_table_name,
            schema=agg_table_schema,
            create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
            write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND
            )
    )

    logging.getLogger().setLevel(logging.INFO)
    logging.info("Building pipeline ...")

    p.run().wait_until_finish()
Example 26
def test_get_time_line_value_last(sequence_factory):
    start_time = time.time_ns()
    sequence_factory._time_stamps = [start_time + i for i in range(3)]
    sequence_factory._values = [f"val_{i}" for i in range(3)]
    assert sequence_factory._get_time_line_value(start_time * 2) == "val_2"
Example 27
import time

ptime = time.time_ns()

import sys

sys.stdin = open('input.txt', 'r')


#####################################################

def solution(n, k):
    nstr = 'a' * k
    for i in range(n-k):
        if i % 3 == 0:
            nstr += 'b'
        elif i % 3 == 1:
            nstr += 'c'
        else:
            nstr += 'a'
    return nstr

t = int(input())
for tt in range(t):
    n, k = map(int, input().split())
    print(solution(n, k))

#####################################################

print('time', time.time_ns() - ptime)
Example 28
def test_sequence_factory_update(sequence_factory, chromosome):
    sequence_factory.set_start_time(time.time_ns())
    sequence_factory.update(chromosome)
    assert sequence_factory._time_stamps[0] >= 0
    assert sequence_factory._values[0] == 42
Example 29
def recordMovements():
    global MOVE_TIMER
    MOVE_TIMER = time.time_ns()
    open('datapoints.txt', 'w').close()  #clear the file for new datapoints
    with mouse.Listener(on_move=on_move, on_click=on_click) as listener:
        listener.join()
Example 30
def time_f(f) -> int:
    start = time.time_ns()
    f()
    return int(time.time_ns() - start)
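A usage sketch for time_f (the workload function below is a hypothetical example; time_f itself assumes time is imported in its module):

import time  # required by time_f above

def busy():
    sum(range(1_000_000))

elapsed_ns = time_f(busy)
print(f"busy() took {elapsed_ns / 1e6:.2f} ms")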
Example 31
        l1, l2 = Var(BOOPType, "l1"), Var(BOOPType, "l2")
        r1, r2 = Var(BOOPType, "r1"), Var(BOOPType, "r2")

        c = (l1.B < 1) & (l1.B > 7) & (l2.B < r1.B) & (r2.B < l1.B) |\
             (l2.B < 1) & (l2.B > 7) & (l2.B < r1.B) & (r1.B < r2.B) |\
             (l2.B < 1) & (l2.B > 7) & (l2.B < r1.B) & (l1.B < l2.B)

        kb = KnowledgeBase()
        cl = get_linked_conditions_instance(c, kb)

        assert get_pointer(cl) == get_pointer(c)

        cl = get_linked_conditions_instance(c, kb, copy=True)

        assert get_pointer(cl) != get_pointer(c)


if (__name__ == "__main__"):
    test_link()
    test_initialize()
    # test_term()
    for i in range(10):
        t0 = time_ns()
        test_build_conditions()
        print(f'{(time_ns()-t0)/1e6} ms')
# # bar.py_func()
# bar()

# exit()
Example 32
def main():

    # Parse command line arguments

    args = parse_args()

    # Extract arguments

    ntask = args.ntask
    nthreads = args.nthreads
    optimization = args.optimization
    nrun = args.nrun

    TUNER_NAME = args.optimization
    (machine, processor, nodes, cores) = GetMachineConfiguration()
    print("machine: " + machine + " processor: " + processor + " num_nodes: " +
          str(nodes) + " num_cores: " + str(cores))

    os.environ['MACHINE_NAME'] = machine
    os.environ['TUNER_NAME'] = TUNER_NAME

    # Task parameters
    geomodels = [
        "cavity_5cell_30K_feko", "pillbox_4000", "pillbox_1000",
        "cavity_wakefield_4K_feko", "cavity_rec_5K_feko", "cavity_rec_17K_feko"
    ]
    # geomodels = ["cavity_wakefield_4K_feko"]
    model = Categoricalnorm(geomodels, transform="onehot", name="model")

    # Input parameters  # the frequency resolution is 100Khz
    # freq      = Integer     (22000, 23500, transform="normalize", name="freq")
    # freq      = Integer     (15000, 23500, transform="normalize", name="freq")
    # freq      = Integer     (19300, 22300, transform="normalize", name="freq")
    # freq      = Integer     (15000, 40000, transform="normalize", name="freq")
    # freq      = Integer     (15000, 18000, transform="normalize", name="freq")
    # freq      = Integer     (6320, 6430, transform="normalize", name="freq")
    # freq      = Integer     (21000, 22800, transform="normalize", name="freq")
    freq = Integer(11400, 12000, transform="normalize", name="freq")
    # freq      = Integer     (500, 900, transform="normalize", name="freq")
    result1 = Real(float("-Inf"), float("Inf"), name="r1")

    IS = Space([model])
    PS = Space([freq])

    OS = Space([result1])

    constraints = {}
    models = {}
    constants = {"nodes": nodes, "cores": cores, "nthreads": nthreads}
    """ Print all input and parameter samples """
    print(IS, PS, OS, constraints, models)

    problem = TuningProblem(IS,
                            PS,
                            OS,
                            objectives,
                            constraints,
                            None,
                            constants=constants)
    computer = Computer(nodes=nodes, cores=cores, hosts=None)
    """ Set and validate options """
    options = Options()
    options['model_processes'] = 1
    # options['model_threads'] = 1
    options['model_restarts'] = 1
    # options['search_multitask_processes'] = 1
    # options['model_restart_processes'] = 1
    options['distributed_memory_parallelism'] = False
    options['shared_memory_parallelism'] = False
    options['model_class'] = 'Model_LCM'  # 'Model_GPy_LCM'
    options['verbose'] = False

    # options['search_algo'] = 'nsga2' #'maco' #'moead' #'nsga2' #'nspso'
    # options['search_pop_size'] = 1000 # 1000
    # options['search_gen'] = 10

    options.validate(computer=computer)

    # """ Building MLA with the given list of tasks """
    # giventask = [["pillbox_4000"]]
    giventask = [["pillbox_1000"]]
    # giventask = [["cavity_5cell_30K_feko"]]
    # giventask = [["cavity_rec_5K_feko"]]
    # giventask = [["cavity_wakefield_4K_feko"]]

    if (TUNER_NAME == 'GPTune'):
        t3 = time.time_ns()
        data = Data(problem)
        gt = GPTune(problem,
                    computer=computer,
                    data=data,
                    options=options,
                    driverabspath=os.path.abspath(__file__))

        NI = len(giventask)
        NS = max(nrun // 2, 1)
        (data, model, stats) = gt.MLA(NS=NS, NI=NI, Igiven=giventask, NS1=NS)

        (Pall, Oall) = readdata(giventask[0][0])

        print("Pall: ", Pall)
        print("Oall: ", Oall)

        try:
            file = open(giventask[0][0] + '_Nmodes.txt', 'r')
            Lines = file.readlines()
            Nmode = int(Lines[0].strip())
            file.close()
        except IOError:
            Nmode = 0
            print("no mode found in the intial samples")

        for nn in range(NS):
            mm = 0
            while mm < Nmode:
                data = Data(problem)
                data.P = [Pall[mm]]
                data.O = [np.array(Oall[mm])]
                gt = GPTune(problem,
                            computer=computer,
                            data=data,
                            options=options,
                            driverabspath=os.path.abspath(__file__))

                NI = len(giventask)
                (data, model, stats) = gt.MLA(NS=len(data.P[0]) + 1,
                                              NI=NI,
                                              Igiven=giventask,
                                              NS1=len(data.P[0]))

                (Pall, Oall) = readdata(giventask[0][0])

                file = open(giventask[0][0] + '_Nmodes.txt', 'r')
                Lines = file.readlines()
                Nmode = int(Lines[0].strip())
                file.close()
                mm += 1
        """ Print all input and parameter samples """
        for mm in range(Nmode):
            print("mode: %d" % (mm))
            print("    geometry:%s" % (giventask[0][0]))
            print("    Ps ", Pall[mm])

            OL = np.asarray([o[0] for o in Oall[mm]], dtype=np.float64)
            np.set_printoptions(suppress=False, precision=8)
            print("    Os ", OL)
            print('    Popt ', Pall[mm][np.argmin(Oall[mm])], 'Oopt ',
                  min(Oall[mm])[0], 'nth ', np.argmin(Oall[mm]))
        t4 = time.time_ns()
        print("Total time: ", (t4 - t3) / 1e9)
Example 33
	def age_seconds(self):
		return (time_ns() - self.start_time) / 1000000000
Example 34
import random
import time
import math


sampSize = 16
cycleProb = [0] * sampSize
pre = time.time_ns()
loops = 100000
#print("counting cycles for an array of size", sampSize, and )
for y in range(loops):
    nrList = random.sample(range(0, sampSize), sampSize)
    usedList = []
    cycles = 0
    #print(nrList)
    for x in range(len(nrList)):
        #print("usedlist: ", usedList)
        if(x not in usedList):
            usedList.append(x)
            nextIndex = nrList[x]
            while(nextIndex != x):
                usedList.append(nextIndex)
                #print("from: ", nextIndex, "to: ", nrList[nextIndex])
                nextIndex = nrList[nextIndex]
                #print("next index: ", nextIndex)
            cycles += 1
    #print(cycles)
    cycleProb[cycles] = cycleProb[cycles] + 1

for x in range(len(cycleProb)):
    cycleProb[x] = (cycleProb[x]*100)/loops
Example 35
	def age_hours(self):
		return (time_ns() - self.start_time) / 3600000000000
Example 36
import time
import sys
from itertools import combinations

prev_time = time.time_ns()
# sys.stdin = open('input1.txt','r')
read = sys.stdin.readline

def solution():
    min_val = 10**9

    for co in comb_chi:
        house_val = [0 for _ in range(len(house_list))]

        for i in range(len(house_list)):
            min_dis = 10 ** 9
            for c in co:
                val_dis = abs(c[0]-house_list[i][0]) + abs(c[1]-house_list[i][1])
                if min_dis > val_dis:
                    min_dis = val_dis
            house_val[i] = min_dis
        min_val = min(min_val,sum(house_val))
    return min_val


N,M = map(int,read().strip().split())
map_arr = [list(map(int,read().strip().split())) for _ in range(N)]

house_list = []
chi_list = []
for i in range(N):
Example 37
	def age_weeks(self):
		return (time_ns() - self.start_time) / 604800000000000
Example 38
import time

ptime = time.time_ns()

import sys

sys.stdin = open('input.txt', 'r')


#####################################################

def solution(n,p,k,alist,x,y):
    mcnt = 0

    pass  # TODO: the counting logic is not included in this snippet

    return mcnt

t = int(input())
for tt in range(t):
    n, p, k = map(int,input().split())
    alist = list(map(int,list(input())))
    x, y = map(int,input().split())

    print(solution(n,p,k,alist,x,y))

#####################################################

print('time', time.time_ns() - ptime)

Example 39
def broadcast_thread(buffers, send_queue, free_queue, strategy):
    if strategy.get('broadcast_process', None):
        print("Lanching broadcast process")
        broadcast = subprocess.Popen(strategy['broadcast_process'],
                                     stdout=subprocess.DEVNULL,
                                     stderr=subprocess.DEVNULL)
    else:
        broadcast = None

    def exit_handler(signum, _):
        broadcast.kill()
        exit(0)

    signal.signal(signal.SIGINT, exit_handler)
    signal.signal(signal.SIGTSTP, exit_handler)

    clientSocket = socket.socket(family=socket.AF_INET,
                                 type=socket.SOCK_STREAM)
    connected = False
    while connected == False:
        try:
            clientSocket.connect(("127.0.0.1", 20001))
            connected = True
        except:
            print("Could not connect to broadcast port")
            time.sleep(0.25)
    frames = 0
    total_frames = 0
    # processing allowance in nanoseconds
    allowance = 1.0 / 29.97 * 1000000000
    begin_batch = time.time()
    while True:
        current_buffer = send_queue.get()
        while send_queue.qsize() < 4:
            pass
        begin = time.time_ns()
        while current_buffer is not None:
            frame_buf = buffers[current_buffer]
            frames += 1
            total_frames += 1
            clientSocket.send(frame_buf)
            free_queue.put(current_buffer)
            current_buffer = None
            if send_queue.qsize() > 16:
                while (time.time_ns() - begin) < allowance * 0.95:
                    pass
            else:
                while (time.time_ns() - begin) < allowance:
                    pass
            while current_buffer is None:
                try:
                    current_buffer = send_queue.get_nowait()
                except:
                    current_buffer = None
            if frames % 100 == 0:
                duration = time.time() - begin_batch
                time_per_frame = duration / 100
                fps = 1.0 / time_per_frame
                print(
                    f"processed_frames={total_frames}, FPS = {fps}, Depth= {free_queue.qsize()}, {send_queue.qsize()}"
                )
                #reset
                frames = 0
                begin_batch = time.time()
            begin = time.time_ns()
Example 40
async def login_submit(username: str, password: str, location: str,
                       region: str):
    #### Start login
    requestSession = requests.session()
    requestSession.headers.update({
        'User-Agent':
        'Mozilla/5.0 (Linux; Android 12; 114514FUCK) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.73 Mobile Safari/537.36',
        'Accept':
        'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
    })

    # get cookie: SESSION
    ignore = requestSession.get('https://cas.hfut.edu.cn/cas/login')
    ignore.raise_for_status()

    # get cookie: JSESSIONID
    ignore = requestSession.get('https://cas.hfut.edu.cn/cas/vercode')
    ignore.raise_for_status()

    # get encryption key
    timeInMillisecond = time.time_ns() // 1_000_000
    responseForKey = requestSession.get(
        url='https://cas.hfut.edu.cn/cas/checkInitVercode',
        params={'_': timeInMillisecond})
    responseForKey.raise_for_status()

    encryptionKey = responseForKey.cookies['LOGIN_FLAVORING']

    # check if verification code is required
    if responseForKey.json():
        logger.info('需要验证码,过一会再试试吧。')
        return False, '需要验证码,过一会再试试吧。'

    # try to login
    encryptedPassword = await encryptPassword(password, encryptionKey)
    checkIdResponse = requestSession.get(
        url='https://cas.hfut.edu.cn/cas/policy/checkUserIdenty',
        params={
            '_': (timeInMillisecond + 1),
            'username': username,
            'password': encryptedPassword
        })
    checkIdResponse.raise_for_status()

    checkIdResponseJson = checkIdResponse.json()
    if checkIdResponseJson['msg'] != 'success':
        # login failed
        if checkIdResponseJson['data']['mailRequired'] or checkIdResponseJson[
                'data']['phoneRequired']:
            # the problem may be solved manually
            logger.info('需要进行手机或邮箱认证,移步: https://cas.hfut.edu.cn/')
            return False, '需要进行手机或邮箱认证'
        logger.info(f'处理checkUserIdenty时出现错误:{checkIdResponseJson["msg"]}')
        return False, f'处理checkUserIdenty时出现错误:{checkIdResponseJson["msg"]}'
    requestSession.headers.update(
        {'Content-Type': 'application/x-www-form-urlencoded'})

    loginResponse = requestSession.post(
        url='https://cas.hfut.edu.cn/cas/login',
        data={
            'username': username,
            'capcha': '',
            'execution': 'e1s1',
            '_eventId': 'submit',
            'password': encryptedPassword,
            'geolocation': "",
            'submit': "登录"
        })
    loginResponse.raise_for_status()

    requestSession.headers.pop('Content-Type')
    if 'cas协议登录成功跳转页面。' not in loginResponse.text:
        # log in failed
        logger.info('登录失败')
        return False, '未知原因,登录失败'
    # log in success
    logger.info('登录成功')

    #### Start submission
    ignore = requestSession.get(
        url='http://stu.hfut.edu.cn/xsfw/sys/swmjbxxapp/*default/index.do')
    # always 502, ignore this
    #ignore.raise_for_status()

    requestSession.headers.update({
        'Content-Type': 'application/x-www-form-urlencoded',
        'X-Requested-With': 'XMLHttpRequest'
    })
    ignore = requestSession.post(
        url='http://stu.hfut.edu.cn/xsfw/sys/emapfunauth/welcomeAutoIndex.do')
    ignore.raise_for_status()

    requestSession.headers.pop('Content-Type')
    requestSession.headers.pop('X-Requested-With')
    ignore = requestSession.get(
        url='http://stu.hfut.edu.cn/xsfw/sys/emapfunauth/casValidate.do',
        params={'service': '/xsfw/sys/swmjbxxapp/*default/index.do'})
    ignore.raise_for_status()

    requestSession.headers.update({
        'X-Requested-With':
        'XMLHttpRequest',
        'Referer':
        'http://stu.hfut.edu.cn/xsfw/sys/swmjbxxapp/*default/index.do'
    })
    ignore = requestSession.get(
        url=
        'http://stu.hfut.edu.cn/xsfw/sys/emappagelog/config/swmxsyqxxsjapp.do')
    ignore.raise_for_status()

    # get role config
    requestSession.headers.pop('X-Requested-With')
    requestSession.headers.update(
        {'Content-Type': 'application/x-www-form-urlencoded'})
    configData = {
        'data':
        json.dumps({
            'APPID': '5811260348942403',
            'APPNAME': 'swmxsyqxxsjapp'
        })
    }
    roleConfigResponse = requestSession.post(
        url=
        'http://stu.hfut.edu.cn/xsfw/sys/swpubapp/MobileCommon/getSelRoleConfig.do',
        data=configData)
    roleConfigResponse.raise_for_status()

    roleConfigJson = roleConfigResponse.json()
    if roleConfigJson['code'] != '0':
        # :(
        logger.info(f'处理roleConfig时发生错误:{roleConfigJson["msg"]}')
        return False, f'处理roleConfig时发生错误:{roleConfigJson["msg"]}'

    # get menu info
    menuInfoResponse = requestSession.post(
        url=
        'http://stu.hfut.edu.cn/xsfw/sys/swpubapp/MobileCommon/getMenuInfo.do',
        data=configData)
    menuInfoResponse.raise_for_status()

    menuInfoJson = menuInfoResponse.json()

    if menuInfoJson['code'] != '0':
        # :(
        logger.info(f'处理menuInfo时发生错误:{menuInfoJson["msg"]}')
        return False, f'处理menuInfo时发生错误:{menuInfoJson["msg"]}'

    todayDateStr = "%.2d-%.2d-%.2d" % time.localtime()[:3]

    # if submitted
    ifSubmitted = requestSession.post(
        url=
        'http://stu.hfut.edu.cn/xsfw/sys/swmxsyqxxsjapp/modules/mrbpa/judgeTodayHasData.do',
        data={'data': json.dumps({'TBSJ': todayDateStr})})
    ifSubmittedJson = ifSubmitted.json()
    if len(ifSubmittedJson['data']) == 1:
        logger.info('今天已经打过卡了,处理结束')
        return 'have_done', ''

    # get setting... for what?
    requestSession.headers.pop('Content-Type')
    settingResponse = requestSession.get(
        url=
        'http://stu.hfut.edu.cn/xsfw/sys/swmxsyqxxsjapp/modules/mrbpa/getSetting.do',
        data={'data': ''})
    settingResponse.raise_for_status()

    settingJson = settingResponse.json()

    # get the form submitted last time
    requestSession.headers.update(
        {'Content-Type': 'application/x-www-form-urlencoded'})
    lastSubmittedResponse = requestSession.post(
        url=
        'http://stu.hfut.edu.cn/xsfw/sys/swmxsyqxxsjapp/modules/mrbpa/getStuXx.do',
        data={'data': json.dumps({'TBSJ': todayDateStr})})
    lastSubmittedResponse.raise_for_status()

    lastSubmittedJson = lastSubmittedResponse.json()

    if lastSubmittedJson['code'] != '0':
        # something wrong with the form submitted last time
        logger.info('上次填报提交的信息出现了问题,本次最好手动填报提交。')
        return 'need_self', ''

    # get the form submitted yesterday
    todayDateStr_tmp = (date.today() + timedelta(days=-1)).strftime("%Y-%m-%d")
    yes_DateStr = f'{todayDateStr_tmp}-{username}'
    requestSession.headers.update(
        {'Content-Type': 'application/x-www-form-urlencoded'})
    yes_SubmittedResponse = requestSession.post(
        url=
        'http://stu.hfut.edu.cn/xsfw/sys/swmxsyqxxsjapp/modules/mrbpa/getStuXx.do',
        data={'data': json.dumps({
            'WID': yes_DateStr,
            'TBSJ': todayDateStr
        })})
    yes_SubmittedResponse.raise_for_status()

    yes_SubmittedJson = yes_SubmittedResponse.json()

    studentKeyResponse = requestSession.post(
        url=
        'http://stu.hfut.edu.cn/xsfw/sys/swmxsyqxxsjapp/modules/mrbpa/studentKey.do',
        data={})
    studentKeyJson = studentKeyResponse.json()

    # generate today's form to submit
    submitDataToday = lastSubmittedJson['data']
    submitDataToday.update({
        'BY1':
        '1',
        'DFHTJHBSJ':
        '',
        'DZ_SFSB':
        '1',
        'DZ_TBDZ':
        location,
        'DZ_TBSJDZ':
        region,
        "DZ_AKMSFYC_DISPLAY":
        "否",
        "DZ_XCKSFYC_DISPLAY":
        "否",
        "DZ_AKMSFYC":
        "0",
        "DZ_XCKSFYC":
        "0",
        "DZ_SCAKMJT":
        yes_SubmittedJson['data']['DZ_SCAKMJT'],
        "DZ_SCXCKJT":
        yes_SubmittedJson['data']['DZ_SCXCKJT'],
        'GCJSRQ':
        '',
        'GCKSRQ':
        '',
        'TBSJ':
        todayDateStr,
        'studentKey':
        studentKeyJson['data']['studentKey']
    })

    paramKeyResponse = requestSession.post(
        url=
        'http://stu.hfut.edu.cn/xsfw/sys/swmxsyqxxsjapp/modules/mrbpa/setCode.do',
        data={'data': json.dumps(submitDataToday)})
    paramKeyJson = paramKeyResponse.json()

    # try to submit
    submitResponse = requestSession.post(
        url=
        'http://stu.hfut.edu.cn/xsfw/sys/swmxsyqxxsjapp/modules/mrbpa/saveStuXx.do',
        data={'data': json.dumps(paramKeyJson['data'])})
    submitResponse.raise_for_status()

    submitResponseJson = submitResponse.json()

    if submitResponseJson['code'] != '0':
        # failed
        logger.info(f'提交时出现错误:{submitResponseJson["msg"]}')
        return False, f'提交时出现错误:{submitResponseJson["msg"]}'

    # succeeded
    logger.info('提交成功')
    requestSession.headers.pop('Referer')
    requestSession.headers.pop('Content-Type')
    return 'success', ''
Example 41
#!/usr/bin/env python3

import time

print(time.time())    # E.g. 1534680332.789262
print(time.time_ns()) # E.g. 1534680332789296548
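A follow-up sketch on precision: time() returns a float, so nanoseconds reconstructed from it can be off by a few hundred nanoseconds (a 64-bit float near 1.7e9 seconds resolves only to roughly 200 ns), whereas time_ns() is an exact integer.

t_float = time.time()
t_ns = time.time_ns()
print(int(t_float * 1e9))  # nanoseconds reconstructed from the float, with rounding error
print(t_ns)                # exact integer nanoseconds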
Example 42
        carry_forward = ""
        chunk = "INITIALIZED"
        while len(chunk) > 0:
            chunk = f.read(chunk_size)
            augmented_chunk = carry_forward + chunk
            lines = augmented_chunk.split(delimiter)
            carry_forward = lines.pop()
            yield from lines
        if carry_forward:
            yield carry_forward


idx = [0, 500, 9000]

import json
import time

t = time.time_ns()

fn = "tweets.jsonl"
table = read_file(fn)

rs = []
for i, r in enumerate(table):
    if i in idx:
        rw = json.loads(r)
        rs.append(rw["username"])

print(rs)

print((time.time_ns() - t) / 1e9, i)
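For reference, a self-contained sketch of the chunked line-splitting idea used at the top of this example (the function name, signature, and defaults are mine, not the original read_file):

def iter_lines(path, chunk_size=64 * 1024, delimiter="\n"):
    # yield lines from a text file without loading it all into memory
    with open(path, "r", encoding="utf-8") as f:
        carry_forward = ""
        chunk = "INITIALIZED"
        while len(chunk) > 0:
            chunk = f.read(chunk_size)
            # any partial line at the end of a chunk is carried into the next read
            augmented_chunk = carry_forward + chunk
            lines = augmented_chunk.split(delimiter)
            carry_forward = lines.pop()
            yield from lines
        if carry_forward:
            yield carry_forward

# usage, mirroring the loop above:
# usernames = [json.loads(line)["username"]
#              for i, line in enumerate(iter_lines("tweets.jsonl")) if i in {0, 500, 9000}]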