Example 1
import json
import os

import numpy as np
import six

from glue.core.roi import Roi
from glue.core import glue_pickle as gp
from glue.core.subset_group import coerce_subset_groups
from glue.utils import lookup_class

literals = tuple([type(None), float, int, bytes, bool])

if six.PY2:
    literals += (long, )  # noqa

literals += tuple(s for s in np.ScalarType
                  if s not in (np.datetime64, np.timedelta64))

builtin_iterables = (tuple, list, set)

JSON_ENCODER = json.JSONEncoder()

# We need to make sure that we don't break backward-compatibility when we move
# classes/functions around in Glue, so we have a file that maps the old paths to
# the new location, and we read this in to PATH_PATCHES.
PATCH_FILE = os.path.abspath(
    os.path.join(os.path.dirname(__file__), 'state_path_patches.txt'))

# For Mac app, need to get file from source directory
if not os.path.exists(PATCH_FILE) and 'site-packages.zip' in PATCH_FILE:
    PATCH_FILE = PATCH_FILE.replace('site-packages.zip', 'glue')

PATH_PATCHES = {}
with open(PATCH_FILE) as patch_file:
    for line in patch_file:
        before, after = line.strip().split(' -> ')
        PATH_PATCHES[before.strip()] = after.strip()
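
A hedged sketch of how these patches might be applied when resolving a saved class path (how glue wires PATH_PATCHES into its loading code is not shown above, so treat this as an assumption):

def _patched_lookup(path):
    # Hypothetical helper: rewrite a remapped module path before resolving
    # it with lookup_class (imported above).
    return lookup_class(PATH_PATCHES.get(path, path))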
Example 2
    def __init__(self,
                 nodes=[],
                 urlopen=None,
                 urlopen_args=[],
                 urlopen_kwargs={},
                 min_timeout=2.0,
                 timeout_backoff=1.0,
                 max_timeout=30.0,
                 max_retries=-1,
                 req_id=0,
                 req_id_increment=1,
                 sleep_function=None,
                 appbase=False,
                 json_encoder=None,
                 json_decoder=None,
                 ):
        """
        :param nodes:  List of Steem nodes to connect to
        :param urlopen:  Function used to load a remote URL;
            urllib.request.urlopen is used if this parameter is None or unspecified
        :param urlopen_args:  List of extra positional arguments to pass to the urlopen function
        :param urlopen_kwargs:  Dict of extra keyword arguments to pass to the urlopen function
        :param min_timeout:  Minimum amount of time to wait
        :param timeout_backoff:  Amount by which to increase the timeout on each HTTP failure code
        :param max_timeout:  Maximum amount of time to wait
        :param max_retries:  Maximum number of retries to attempt (-1 means try again forever)
        :param req_id:  The ID of the first request
        :param req_id_increment:  The amount by which subsequent request IDs should be incremented
        :param sleep_function:  time.sleep() or similar
        :param appbase:  If true, require keyword arguments.  If false, require positional arguments.
        :param json_encoder:  Used to encode JSON for requests.  If not supplied, uses json.JSONEncoder
        :param json_decoder:  Used to decode JSON from responses.  If not supplied, uses json.JSONDecoder
        """
        self.nodes = list(nodes)
        self.current_node = 0  # index of the current node

        if urlopen is None:  # default to urllib.request.urlopen
            urlopen = urllib.request.urlopen
        self.urlopen = urlopen
        self.urlopen_args = list(urlopen_args)
        self.urlopen_kwargs = dict(urlopen_kwargs)

        self.min_timeout = min_timeout
        self.timeout_backoff = timeout_backoff
        self.max_timeout = max_timeout
        self.max_retries = max_retries

        self.req_id = req_id
        self.req_id_increment = req_id_increment
        if sleep_function is None:
            sleep_function = time.sleep
        self.sleep_function = sleep_function

        self.appbase = appbase

        if json_encoder is None:
            json_encoder = json.JSONEncoder(
                ensure_ascii=True,
                sort_keys=True,
                separators=(",", ":"),
                )
        self.json_encoder = json_encoder
        if json_decoder is None:
            json_decoder = json.JSONDecoder(
                object_pairs_hook=collections.OrderedDict,
                )
        self.json_decoder = json_decoder
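
The encoder configured above emits compact, key-sorted JSON, which keeps request payloads small and deterministic; a quick stdlib-only check:

enc = json.JSONEncoder(ensure_ascii=True, sort_keys=True, separators=(",", ":"))
print(enc.encode({"b": 1, "a": [1, 2]}))  # -> {"a":[1,2],"b":1}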
Example 3
import json


def create_json_with_cur_data(daily_energy_dict, daily_power_dict,
                              watts_total_dict, data_timestamp_date,
                              data_timestamp_hms):
    # Create a JSON string with the following format:
    '''
    {
        "Day": "2016-12-02",
        "HMS": "06:01:48",
        "BldgC":        {"DEU": "7.97415290855e+22", "DPU": "3.4670231691e+21", "WU": "10671098896.4"},
        "Sherman":      {"DEU": "7.97415290855e+22", "DPU": "3.4670231691e+21", "WU": "10671098896.4"},
        "RauchChiller": {"DEU": "7.97415290855e+22", "DPU": "3.4670231691e+21", "WU": "10671098896.4"},
        "Jordan":       {"DEU": "7.97415290855e+22", "DPU": "3.4670231691e+21", "WU": "10671098896.4"},
        "Varsity":      {"DEU": "7.97415290855e+22", "DPU": "3.4670231691e+21", "WU": "10671098896.4"},
        "Stadium":      {"DEU": "7.97415290855e+22", "DPU": "3.4670231691e+21", "WU": "10671098896.4"},
        "Williams":     {"DEU": "7.97415290855e+22", "DPU": "3.4670231691e+21", "WU": "10671098896.4"}
    }
    '''

    # Use a dictionary to create the JSON object. To be encoded below.
    main_dict = {}
    main_dict["Day"] = data_timestamp_date
    main_dict["HMS"] = data_timestamp_hms

    # For each building name, retrieve statistics and store in dictionary.
    for building_name in daily_energy_dict.keys():
        tmp_dict = {
            "DEU": daily_energy_dict[building_name],
            "DPU": daily_power_dict[building_name],
            "WU": watts_total_dict[building_name]
        }
        main_dict[building_name] = tmp_dict

    # Return as a JSON object.
    return json.JSONEncoder().encode(main_dict)
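
A minimal call with a single building shows the shape of the returned string (the output is one line; wrapped here for readability):

print(create_json_with_cur_data(
    {"BldgC": "7.97e+22"}, {"BldgC": "3.46e+21"}, {"BldgC": "1.07e+10"},
    "2016-12-02", "06:01:48"))
# -> {"Day": "2016-12-02", "HMS": "06:01:48",
#     "BldgC": {"DEU": "7.97e+22", "DPU": "3.46e+21", "WU": "1.07e+10"}}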
Example 4
    def post(self, request, vimid="", tenantid="", subnetid=""):
        logger.info("vimid, tenantid, subnetid = %s,%s,%s" %
                    (vimid, tenantid, subnetid))
        if request.data:
            logger.debug("With data = %s" % request.data)
        try:
            # Check whether a subnet with this name was created already
            query = "project_id=%s&name=%s" % (tenantid, request.data["name"])
            networkid = request.data.get("networkId", None)
            if networkid:
                query += "&network_id=%s" % networkid

            content, status_code = self._get_subnets(query, vimid, tenantid)
            existed = False
            if status_code == 200:
                for subnet in content["subnets"]:
                    if subnet["name"] == request.data["name"]:
                        existed = True
                        break
                if existed:
                    vim_dict = {
                        "returnCode": 0,
                    }
                    subnet.update(vim_dict)
                    return Response(data=subnet, status=status_code)

            # prepare request resource to vim instance
            req_resource = "v2.0/subnets"

            vim = VimDriverUtils.get_vim_info(vimid)
            sess = VimDriverUtils.get_session(vim, tenantid)
            subnet = request.data
            VimDriverUtils.replace_key_by_mapping(subnet, self.keys_mapping,
                                                  True)
            req_body = json.JSONEncoder().encode({"subnet": subnet})

            self.service['region_name'] = vim['openstack_region_id'] \
                if vim.get('openstack_region_id') \
                else vim['cloud_region_id']

            logger.info("making request with URI: %s" % req_resource)
            logger.debug("with data: %s" % req_body)
            resp = sess.post(req_resource,
                             data=req_body,
                             endpoint_filter=self.service)
            logger.info("request returns with status %s" % resp.status_code)
            resp_body = resp.json()["subnet"]
            VimDriverUtils.replace_key_by_mapping(resp_body, self.keys_mapping)
            vim_dict = {
                "vimName": vim["name"],
                "vimId": vim["vimId"],
                "cloud-owner": vim["cloud_owner"],
                "cloud-region-id": vim["cloud_region_id"],
                "tenantId": tenantid,
                "returnCode": 1,
            }
            resp_body.update(vim_dict)
            return Response(data=resp_body, status=resp.status_code)
        except VimDriverNewtonException as e:
            logger.error("response with status = %s" % e.status_code)
            return Response(data={'error': e.content}, status=e.status_code)
        except HttpError as e:
            logger.error("HttpError: status:%s, response:%s" %
                         (e.http_status, e.response.json()))
            return Response(data=e.response.json(), status=e.http_status)
        except Exception as e:
            logger.error(traceback.format_exc())
            return Response(data={'error': str(e)},
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
Example 5
    def setUp(self):
        self.j = json.JSONEncoder()
Example 6
def main():

    moreDetails = False

    if len(sys.argv) > 1 and sys.argv[1] == 'True':
        moreDetails = True
        print(
            "You have chosen to get some more details of your schema coverage. This will be provided in a file in root "
            "called 'individualSchemaCoverage.csv'. The overview of the covered schema will be printed to a file "
            "called 'schemaCoverageDictionary.csv'.")
        with open('individualSchemaCoverage.csv', 'w') as csvfile:
            csvwriter = csv.writer(csvfile)
            csvwriter.writerow(['testID', 'individualCoverage'])

    templateLoader = jinja2.FileSystemLoader(searchpath="")
    templateEnv = jinja2.Environment(loader=templateLoader)
    template = templateEnv.get_template(cfg.test_template)

    parser = GraphQLParser()
    encoder = json.JSONEncoder()

    types = requests.post(cfg.graphql_url,
                          data=encoder.encode(cfg.schema_query),
                          headers={'content-type': 'application/json'})

    schema = json.loads(types.content)['data']['__schema']

    #jsonschema = json.dumps(schema)
    #jsonFile = open('schema.json', 'w+')
    #jsonFile.write(jsonschema)

    createDict = CreateDictionaries(schema)
    possValuesDict = createDict.possibleValuesDictionary()
    schemaCoverageDict = createDict.schemaCoverageDictionary()

    searcher = SchemaSearcher(schema, schemaCoverageDict)
    walker = AstWalker(searcher)
    createAssertions = CreateAssertions(possValuesDict)

    for f in os.listdir('queries/'):
        id = f.split('.json')[0]
        if id == '.DS_Store':
            continue
        testName = 'Q' + ''.join(id.split('-')) + 'Test'
        with open('queries/' + f) as query_file:
            payload = query_file.read()
        jsonPayload = "<<<'JSON'\n" + payload + "\nJSON"

        try:
            payload_dict = json.loads(payload)
        except ValueError:
            print("Couldn't load " + id)
            continue

        try:
            astree = parser.parse(payload_dict['query'])
        except Exception:
            print('Something is wrong with test ' + id)
            continue

        mutation = False
        query = None

        # Checking there are no mutations in query
        for tree in astree.definitions:
            if type(tree) == graphql.ast.Mutation:
                print(id + ' contains mutations and will not be used')
                mutation = True
                break

        # Skipping current query if contains mutations
        if mutation:
            continue

        searcher.setId(id)

        # Checking other types in query
        for tree in astree.definitions:
            if isinstance(tree, graphql.ast.FragmentDefinition):
                success = createDict.createFragmentDictionary(tree, walker)
                if success:
                    walker.fragmentDictionary = createDict.fragmentDictionary
                else:
                    # Re-queue the fragment so it is retried later.
                    astree.definitions.append(tree)
                    continue
            elif isinstance(tree, graphql.ast.Query):
                query = tree

        rootNode = walker.walk(query, None)

        if moreDetails:
            createSchemaDict = CreateDictionaries(schema)
            individualSchemaCoverageDict = createSchemaDict.schemaCoverageDictionary(
            )
            schemaSearcher = SchemaSearcher(schema,
                                            individualSchemaCoverageDict)
            schemaWalker = AstWalker(schemaSearcher)
            schemaWalker.fragmentDictionary = createDict.fragmentDictionary
            schemaWalker.walk(query, None)
            with open('individualSchemaCoverage.csv', 'a') as csvfile:
                csvwriter = csv.writer(csvfile)
                csvwriter.writerow(
                    [id, (schemaSearcher.calculateSchemaCoverage() * 100)])

        variables = ['$a', '$b', '$c', '$d', '$e', '$f', '$g']

        try:
            assertions = []
            for node in rootNode:
                nodeAssertions = createAssertions.createAssertions(
                    node, variables)
                for line in nodeAssertions:
                    assertions.append(line)
            output = template.render(className=testName,
                                     query=jsonPayload,
                                     allAssertions=assertions,
                                     graphQLURL=cfg.graphql_url,
                                     authToken=cfg.authorization_token)
            with open('testCases/' + testName + '.php', 'w') as testfile:
                testfile.write(output)
        except Exception:
            continue

    if moreDetails:
        with open('schemaCoverageDictionary.csv', 'w') as csvfile:
            csvwriter = csv.writer(csvfile)
            csvwriter.writerow(
                ['schemaTuple', 'visited', 'timesVisited', 'id'])
            for line in schemaCoverageDict:
                csvwriter.writerow([
                    line, schemaCoverageDict[line][1],
                    schemaCoverageDict[line][0], schemaCoverageDict[line][2]
                ])

    print("The schema coverage for the generated test suite is: " +
          str(searcher.calculateSchemaCoverage() * 100) + ' %' +
          " where mutations are: " + str(searcher.calculateMutations() * 100) +
          ' % of the schema and input objects are: ' +
          str(searcher.calculateInputTypes() * 100) + ' % of the schema.')
Example 7
class InvalidJSONModel6(TemporaryModel):
    field = JSONField(encoder=json.JSONEncoder())
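
The model above is intentionally invalid: Django's JSONField expects a callable encoder (normally the JSONEncoder class itself), and an encoder instance is not callable. The valid counterpart would look like this (ValidJSONModel is a hypothetical name):

class ValidJSONModel(TemporaryModel):
    field = JSONField(encoder=json.JSONEncoder)  # pass the class, not an instance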
Example 8
import json
import random


def roll_the_dice():
    die1 = random.randint(1, 12)
    die2 = random.randint(1, 12)
    return json.JSONEncoder().encode({'die1': die1, 'die2': die2})
Example 9
    def send_subscription_request(self):
        ret = req_by_msb('api/gvnfmdriver/v1/%s/subscriptions' % self.vnfm_id,
                         'POST',
                         json.JSONEncoder().encode(self.subscription_request_data))
        if ret[0] != 0:
            logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
            raise NSLCMException("Failed to subscribe from vnfm(%s)." % self.vnfm_id)
        self.subscription_response_data = json.JSONDecoder().decode(ret[1])
Example 10
class JSONStandardRWFormat(TokenRWFormat):

    DECODER = json.JSONDecoder().decode
    ENCODER = json.JSONEncoder().encode

    NAME_MAPPING = {
        'bool': bool,
        'unicode': str,
        'int': int,
        'float': float,
        'null': type(None),
        'tuple': tuple,
        'list': list,
        'map': dict,
    }

    TYPE_MAPPING = dict([(val, key) for key, val in NAME_MAPPING.items()])
    RESERVED_CLASS_IDS = set(NAME_MAPPING.keys())

    @classmethod
    def isNullOrPrimitive(cls, x):
        if x is None:
            return True

        if type(x) in cls.VALID_ATOMIC_VALUE_TYPES:
            return True

        return False

    @classmethod
    def parse(cls, string):
        decoded = cls.DECODER(string)
        return cls.makeNative(decoded)

    @classmethod
    def serialize(cls, data):
        """ Serialize python objects into a JSON string form """
        serializable = cls.makeSerializable(data)
        return cls.ENCODER(serializable)

    @classmethod
    def makeSerializable(cls, x):
        if x is None:
            return x
        xType = type(x)

        if xType in cls.VALID_ATOMIC_VALUE_TYPES:
            return x
        elif xType in cls.VALID_SEQUENCE_VALUE_TYPES:
            sequenceData = []
            for obj in x:
                sequenceData.append(cls.makeSerializable(obj))
            return sequenceData
        elif xType in cls.VALID_MAPPING_VALUE_TYPES:
            processedMap = {}
            for key in x.keys():
                processedMap[cls.makeSerializable(key)] = cls.makeSerializable(
                    x[key])
            # Tag the mapping once, outside the loop, so empty maps are
            # tagged as well.
            processedMap['isMap'] = True
            return processedMap
        elif xType == StorageToken:
            storageTokenChildren = {}
            for key in x.keys():
                value = x[key]
                storageTokenChildren[key] = cls.makeSerializable(value)
            return storageTokenChildren

        # Unknown types: fail loudly instead of silently returning None.
        raise TypeError("Cannot serialize object of type %s: %s" % (xType, x))

    @classmethod
    def makeNative(cls, x):
        if cls.isNullOrPrimitive(x):
            return x

        xType = type(x)

        if xType in cls.VALID_SEQUENCE_VALUE_TYPES:
            result = []
            for item in x:
                result.append(cls.makeNative(item))
            return result
        elif xType in cls.VALID_MAPPING_VALUE_TYPES:
            result = {}
            if 'isMap' in x:
                for key in x:
                    nativizedKey = cls.makeNative(key)
                    nativizedValue = cls.makeNative(x[key])
                    result[nativizedKey] = nativizedValue
                del result['isMap']
                return result
            else:
                nativizedData = {}
                for key in x.keys():
                    if cls.isNullOrPrimitive(x[key]):
                        nativizedData[key] = x[key]
                    else:
                        nativizedData[key] = cls.makeNative(x[key])

                result = StorageToken(None,
                                      nativizedData[StorageToken.CLASS_ID_KEY],
                                      nativizedData)
                return result
Example 11
class JSONRWFormat(TokenRWFormat):
    """ JSON Serialization Format Handler """
    DECODER = json.JSONDecoder().decode
    ENCODER = json.JSONEncoder().encode

    NAME_MAPPING = {
        'bool': bool,
        'int': int,
        'float': float,
        'unicode': str,
        'null': type(None),
        'tuple': tuple,
        'list': list,
        'map': dict,
    }
    TYPE_MAPPING = dict([(val, key) for key, val in NAME_MAPPING.items()])
    RESERVED_CLASS_IDS = set(NAME_MAPPING.keys())

    @classmethod
    def parse(cls, string):
        """ Parse a JSON string into python objects """
        decoded = cls.DECODER(string)
        return cls.makeNative(decoded)

    @classmethod
    def serialize(cls, data):
        """ Serialize python objects into a JSON string form """
        serializable = cls.makeSerializable(data)
        return cls.ENCODER(serializable)

    @classmethod
    def makeSerializable(cls, x):
        xType = type(x)
        if xType in cls.VALID_ATOMIC_VALUE_TYPES:
            return x
        elif xType in cls.VALID_SEQUENCE_VALUE_TYPES:
            return {
                cls.TYPE_MAPPING[xType]:
                tuple([cls.makeSerializable(val) for val in x])
            }
        elif xType in cls.VALID_MAPPING_VALUE_TYPES:
            return {
                cls.TYPE_MAPPING[xType]:
                dict([(cls.makeSerializable(key), cls.makeSerializable(val))
                      for key, val in x.items()])
            }
        elif xType == StorageToken:
            # Use the Factory Class Id as the type
            return {
                x.getClassId():
                dict([(cls.makeSerializable(key), cls.makeSerializable(val))
                      for key, val in x.items()])
            }
        else:
            raise TypeError(
                "Tried to serialize unserializable object of type (%s): %s" %
                (xType, x))

    @classmethod
    def makeNative(cls, x):
        if not hasattr(x, '__iter__') or isinstance(x, str):
            return x
        dataTypeName = list(x.keys())[0]
        data = x[dataTypeName]
        dataType = cls.NAME_MAPPING.get(dataTypeName, StorageToken)
        if dataType in cls.VALID_SEQUENCE_VALUE_TYPES:
            return dataType([cls.makeNative(val) for val in data])
        elif dataType in cls.VALID_MAPPING_VALUE_TYPES:
            return dataType([(cls.makeNative(key), cls.makeNative(val))
                             for key, val in data.items()])
        elif dataType == StorageToken:
            data = dict([(key, cls.makeNative(val))
                         for key, val in data.items()])
            token = StorageToken(data=data)
            return token
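
A hedged illustration of the tagged wire format this class produces (the VALID_* type sets live on TokenRWFormat, which is not shown, so the registered types are an assumption):

# JSONRWFormat.serialize((1, 2))          -> '{"tuple": [1, 2]}'
# JSONRWFormat.parse('{"tuple": [1, 2]}') -> (1, 2)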
Example 12
def BuildSTBLChanges() -> bool:
    canBuildSTBL = STBL.CanBuildSTBL()  # type: bool

    if not canBuildSTBL:
        return False

    for package in Mod.GetCurrentMod().Packages:  # type: Mod.Package
        if not os.path.exists(package.STBLPath):
            continue

        for stblXMLFileName in os.listdir(package.STBLPath):  # type: str
            stblXMLFilePath = os.path.join(package.STBLPath,
                                           stblXMLFileName)  # type: str

            if os.path.isfile(stblXMLFilePath) and os.path.splitext(
                    stblXMLFileName)[1].casefold() == ".xml":
                manifestFilePath = os.path.splitext(
                    stblXMLFilePath)[0] + "_Manifest.json"  # type: str

                modifiedTime = os.path.getmtime(stblXMLFilePath)  # type: float
                builtModifiedTime = None  # type: typing.Optional[int]
                builtFileNames = list()  # type: typing.List[str]

                try:
                    if os.path.exists(manifestFilePath):
                        with open(manifestFilePath) as manifestFile:
                            manifest = json.JSONDecoder().decode(
                                manifestFile.read()
                            )  # type: typing.Dict[str, typing.Any]

                        if not isinstance(manifest, dict):
                            raise Exceptions.IncorrectTypeException(
                                manifest, "Root", (dict, ))

                        if ManifestBuiltModifiedTimeKey in manifest:
                            builtModifiedTime = manifest[
                                ManifestBuiltModifiedTimeKey]

                        if not isinstance(builtModifiedTime, (float, int)):
                            incorrectValue = builtModifiedTime  # type: typing.Any
                            builtModifiedTime = None
                            raise Exceptions.IncorrectTypeException(
                                incorrectValue,
                                "Root[%s]" % ManifestBuiltModifiedTimeKey,
                                (float, int))

                        if ManifestBuiltFileNamesKey in manifest:
                            builtFileNames = manifest[
                                ManifestBuiltFileNamesKey]

                        if not isinstance(builtFileNames, list):
                            incorrectValue = builtFileNames  # type: typing.Any
                            builtFileNames = list()
                            raise Exceptions.IncorrectTypeException(
                                incorrectValue,
                                "Root[%s]" % ManifestBuiltFileNamesKey,
                                (list,))

                        for builtFileNameIndex in range(
                                len(builtFileNames)):  # type: int
                            builtFileName = builtFileNames[
                                builtFileNameIndex]  # type: str

                            if not isinstance(builtFileName, str):
                                builtFileNames = list()
                                raise Exceptions.IncorrectTypeException(
                                    builtFileName, "Root[%s][%s]" %
                                    (ManifestBuiltFileNamesKey,
                                     builtFileNameIndex), (str,))

                except Exception as e:
                    print("Failed to read STBL manifest file at '" +
                          manifestFilePath + "'\n" + str(e),
                          file=sys.stderr)

                missingBuiltFile = False  # type: bool

                for builtFileName in builtFileNames:
                    builtFilePath = os.path.join(
                        os.path.join(package.SourceLoosePath, "STBL"),
                        builtFileName)  # type: str

                    if not os.path.exists(builtFilePath):
                        missingBuiltFile = True
                        break

                if missingBuiltFile or modifiedTime != builtModifiedTime:
                    buildTempDirectory = stblXMLFilePath + "_Temp_Build"  # type: str

                    if not os.path.exists(buildTempDirectory):
                        os.makedirs(buildTempDirectory)

                    try:
                        STBL.BuildSTBL(buildTempDirectory, stblXMLFilePath)

                        manifest = dict()  # type: typing.Dict[str, typing.Any]

                        manifest[ManifestBuiltModifiedTimeKey] = modifiedTime
                        builtFileNames = list()

                        for builtFileName in os.listdir(buildTempDirectory):
                            builtFilePath = os.path.join(
                                buildTempDirectory, builtFileName)

                            if os.path.isfile(builtFilePath):
                                builtFileNames.append(builtFileName)

                        manifest[ManifestBuiltFileNamesKey] = builtFileNames

                        with open(manifestFilePath, "w+") as manifestFile:
                            manifestFile.write(
                                json.JSONEncoder(indent="\t").encode(manifest))

                        dir_util.copy_tree(
                            buildTempDirectory,
                            os.path.join(package.SourceLoosePath, "STBL"))
                    finally:
                        shutil.rmtree(buildTempDirectory)

    return True
Example 13
def calculate(file):
    start = file[7:21]
    starttm = int(
        time.mktime(
            (int(start[0:4]), int(start[4:6]), int(start[6:8]),
             int(start[8:10]), int(start[10:12]), int(start[12:14]), 0, 0, 0)))

    logging.info("start analyzing:" + file)
    # define regexes
    req_re = re.compile(r"^(.+)(\d)_/seg(\d)[^\?]+(\d{9})")
    live_re = re.compile(r"^(.*)/live/(ld/flv|ld/trans|flv|trans)/")
    long_rate_re = re.compile(r'(\d+)_(\d+)')
    channel_re = re.compile(r'^([^\d\.]+[^\.]*)\..*')
    am_re = re.compile(r'^.+am=(\d+)')
    with open(log_dir + "/" + file, 'r') as log_file:
        logs = log_file.readlines()

    # init top_list: six categories that share the same counter structure
    def new_category(cat_type):
        return {
            'type': cat_type,
            'list': [],
            'users': {},
            "req_n": 0,
            "suc_n": 0,
            "suc_r": 0,
            "user_n": 0,
            "jam_n": 0,
            "freeze_r": 0,
            "flu": 0,
            "band": 0,
            "rate_n": {"0": 0, "1": 0, "2": 0, "3": 0, "4": 0},
            "bitrate": 0,
            "channel_n": {}
        }

    top_list = {
        'hls_0': new_category(1),
        'hds_1': new_category(1),
        'ld/flv': new_category(2),
        'ld/trans': new_category(2),
        'flv': new_category(2),
        'trans': new_category(2),
    }
    total = {
        'user_list': [],
        'req_n': 0,
        'suc_n': 0,
        'jam_n': 0,
        'flu': 0,
        'band': 0,
        'rate_n': {"0": 0, "1": 0, "2": 0, "3": 0, "4": 0},
        'channel_n': {}
    }

    # parse logs
    for l in logs:
        try:
            agent = l.split('"')[1].decode("utf-8", 'ignore')
        except (IndexError, AttributeError):
            continue
        try:
            x_group = l.split(" ")
            # 0Begin_Time, 1User_IP, 2ResponseCode, 3Flu, 4Duration, 5Freeze_Count, 6Bitrate, 7Domain, 8Port, 9URI, 10UserAgent
            if len(x_group) < 11:
                continue
            ip = x_group[1]
            tim = int(x_group[0])
            status = bool(re.compile(r"^(2|3)\d{2}$").match(x_group[2]))
            flu = int(x_group[3])
            duration = int(x_group[4])
            # channel = x_group[7].split(".")[0]
            channel_ma = channel_re.match(x_group[7])
            am_ma = am_re.match(x_group[9])
            req_ma = req_re.match(x_group[9])
            live_ma = live_re.match(x_group[9])

            if channel_ma:
                channel = channel_ma.group(1)
            else:
                channel = "unknown"
            if am_ma:
                am = am_ma.group(1)
            else:
                am = "am"
            if req_ma:
                rate = str(int(req_ma.group(2)) % 5)
                # seg = req_ma.group(3)==u"1"
                seg = (x_group[9].find('-Frag') != -1)
                segnum = int(req_ma.group(4))
                r = (ip + agent, tim, status, channel, rate, seg, segnum, ip,
                     agent, flu, duration, am)
                if seg:
                    top_list['hds_1']['list'].append(r)
                else:
                    top_list['hls_0']['list'].append(r)
            elif live_ma:
                live_type = live_ma.group(2)
                rate = x_group[6]
                try:
                    live_jam = int(x_group[5]) > 0
                except ValueError:
                    live_jam = False
                r = (ip + agent, tim, status, channel, rate, "", live_jam, ip,
                     agent, flu, duration, am)
                if live_type in top_list:
                    top_list[live_type]['list'].append(r)
        except Exception:
            pass

    # analyze top_list
    for category_name in top_list:
        current_category = top_list[category_name]
        log_list = current_category['list']
        user_list = current_category['users']
        rate_list = current_category['rate_n']
        channel_list = current_category['channel_n']
        if current_category['type'] == 1:
            for l in log_list:
                if l[0] in user_list:
                    user_list[l[0]]["end"] = l[1]
                    user_list[l[0]]["seg_e"] = l[6]
                    user_list[l[0]]["req_n"] += 1
                    if l[2]:
                        user_list[l[0]]["suc_n"] += 1
                    user_list[l[0]]["flu"] += l[9]
                    user_list[l[0]]["duration"] += l[10]
                else:
                    user_list[l[0]] = {
                        # "log_time":starttm,
                        # "from":log_type,
                        "u_ip": l[7],
                        "req_n": 1,
                        "suc_n": 1 if l[2] else 0,
                        "start": l[1],
                        "end": l[1],
                        "seg_t": l[5],
                        "seg_s": l[6],
                        "seg_e": l[6],
                        "agent": l[8],
                        "flu": l[9],
                        "duration": l[10],
                        "am": l[11],
                        "rate_n": {
                            "0": 0,
                            "1": 0,
                            "2": 0,
                            "3": 0,
                            "4": 0
                        },
                        "channel_n": {},
                        "type": category_name
                    }

                channel_list[l[3]] = channel_list.get(l[3], 0) + l[9]
                total['channel_n'][l[3]] = total['channel_n'].get(l[3], 0) + l[9]
                user_channels = user_list[l[0]]['channel_n']
                user_channels[l[3]] = user_channels.get(l[3], 0) + l[9]

                seg_mode_time = 4 if l[5] else 10
                rate_list[l[4]] = rate_list.get(l[4], 0) + seg_mode_time
                user_rates = user_list[l[0]]['rate_n']
                user_rates[l[4]] = user_rates.get(l[4], 0) + seg_mode_time

                if l[2]:
                    current_category['suc_n'] += 1
                #flu total
                current_category['flu'] += l[9]

            for u in user_list:
                jam = ifjam(user_list[u])
                user_list[u]["jam"] = jam
                if jam:
                    current_category['jam_n'] += 1
                # user_list[u]["s_ip"] = server_ip
                del user_list[u]["seg_t"]
                del user_list[u]["seg_s"]
                del user_list[u]["seg_e"]

        elif current_category['type'] == 2:
            for l in log_list:
                if l[0] in user_list:
                    user_list[l[0]]["req_n"] += 1
                    if l[2]:
                        user_list[l[0]]["suc_n"] += 1
                    user_list[l[0]]["flu"] += l[9]
                    user_list[l[0]]["duration"] += l[10]
                else:
                    user_list[l[0]] = {
                        # "log_time":starttm,
                        # "from":log_type,
                        "u_ip": l[7],
                        "req_n": 1,
                        "suc_n": 1 if l[2] else 0,
                        "start": l[1],
                        "end": l[1],
                        "agent": l[8],
                        "jam": l[6],
                        # "s_ip": server_ip,
                        "flu": l[9],
                        "duration": l[10],
                        "am": l[11],
                        "rate_n": {
                            "0": 0,
                            "1": 0,
                            "2": 0,
                            "3": 0,
                            "4": 0
                        },
                        "channel_n": {},
                        "type": category_name
                    }
                channel_list[l[3]] = channel_list.get(l[3], 0) + l[9]
                total['channel_n'][l[3]] = total['channel_n'].get(l[3], 0) + l[9]
                user_channels = user_list[l[0]]['channel_n']
                user_channels[l[3]] = user_channels.get(l[3], 0) + l[9]

                lrms = long_rate_re.findall(l[4])
                for lrm in lrms:
                    if int(lrm[0]) == 4000:
                        k = "0"
                    else:
                        # floor division keeps the "1".."4" bucket keys
                        # intact under Python 3 as well
                        k = str((2500 - int(lrm[0])) // 500)
                    rate_list[k] = rate_list.get(k, 0) + int(lrm[1])
                    user_rates = user_list[l[0]]['rate_n']
                    user_rates[k] = user_rates.get(k, 0) + int(lrm[1])

                if l[2]:
                    current_category['suc_n'] += 1
                #flu total
                current_category['flu'] += l[9]
            for u in user_list:
                if user_list[u]["jam"]:
                    current_category['jam_n'] += 1

        current_category['req_n'] = len(log_list)
        current_category['user_n'] = len(user_list)
        if current_category['req_n'] != 0:
            current_category['suc_r'] = round(
                float(current_category['suc_n'] * 100) /
                current_category['req_n'], 2)
        if len(user_list) != 0:
            current_category['freeze_r'] = round(
                float(current_category['jam_n'] * 100) / len(user_list), 2)
        current_category['band'] = round(
            float(current_category['flu']) * 8 / log_duration / 1000, 2)
        try:
            current_category['bitrate'] = (
                rate_list["0"] * 4000 + rate_list["1"] * 2000 +
                rate_list["2"] * 1500 + rate_list["3"] * 850 +
                rate_list["4"] * 500) / (rate_list["1"] + rate_list["2"] +
                                         rate_list["3"] + rate_list["4"])
        except (KeyError, ZeroDivisionError):
            current_category['bitrate'] = 0

        #to total
        total['user_list'].extend(
            list(map(stringtify_user_obj, user_list.values())))
        total['req_n'] += current_category['req_n']
        total['suc_n'] += current_category['suc_n']
        total['jam_n'] += current_category['jam_n']
        total['flu'] += current_category['flu']
        total['band'] += current_category['band']
        total['rate_n']['0'] += current_category['rate_n']['0']
        total['rate_n']['1'] += current_category['rate_n']['1']
        total['rate_n']['2'] += current_category['rate_n']['2']
        total['rate_n']['3'] += current_category['rate_n']['3']
        total['rate_n']['4'] += current_category['rate_n']['4']
        #clear
        del current_category['type']
        del current_category['list']
        del current_category['users']


    # add total keys
    user_list = total['user_list']
    log_info = top_list
    log_info['from'] = log_type
    log_info['version'] = code_version + ' ' + code_build
    log_info['duration'] = log_duration
    log_info['md5'] = md5_str
    log_info['s_ip'] = server_ip
    log_info['start'] = starttm
    log_info['req_n'] = total['req_n']
    log_info['suc_n'] = total['suc_n']
    if total['req_n'] != 0:
        log_info['suc_r'] = round(
            float(total['suc_n'] * 100) / total['req_n'], 2)
    log_info['user_n'] = len(user_list)
    log_info['jam_n'] = total['jam_n']
    if len(user_list) != 0:
        log_info['freeze_r'] = round(
            float(total['jam_n'] * 100) / len(user_list), 2)
    log_info['flu'] = total['flu']
    log_info['band'] = total['band']
    log_info['rate_n'] = total['rate_n']
    try:
        log_info['bitrate'] = (
            log_info['rate_n']["0"] * 4000 + log_info['rate_n']["1"] * 2000 +
            log_info['rate_n']["2"] * 1500 + log_info['rate_n']["3"] * 850 +
            log_info['rate_n']["4"] * 500) / (
                log_info['rate_n']["1"] + log_info['rate_n']["2"] +
                log_info['rate_n']["3"] + log_info['rate_n']["4"])
    except (KeyError, ZeroDivisionError):
        log_info['bitrate'] = 0
    log_info['channel_n'] = total['channel_n']

    #send to kafka
    user_list_json = json.JSONEncoder().encode({
        'log_time': starttm,
        'from': log_type,
        's_ip': server_ip,
        'users': user_list
    })
    log_info_json = json.JSONEncoder().encode(log_info)

    retry_time = 10
    log_state = False
    user_state = False
    global fail_times
    global last_fail_time
    while retry_time > 0:
        retry_time -= 1
        res = conn_kafka(user_list_json, log_info_json, log_state, user_state)
        log_state = res[0]
        user_state = res[1]
        if log_state and user_state:
            logging.info("complete analyzing:" + file)
            break
        time.sleep(5)
    if not (log_state and user_state):
        if time.time() - last_fail_time > 600:
            fail_times = 0
        else:
            last_fail_time = time.time()
        if fail_times > 10:
            logging.error("kill myself")
            os._exit(0)
        else:
            logging.error("Kafka error and retry failed")
            fail_times += 1
            raise TimeOutException()
Example 14
from __future__ import print_function
import calendar
import collections
import datetime
from enum import Enum
import json
import requests
import socket
import struct

# Disable SSL warning from requests - the Roomba's SSL certificate is self signed
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

# Monkey patch the json module to be able to encode Enums and datetime.time
_json_default = json.JSONEncoder.default  # keep the unbound method so the (self, obj) call below works


def _encode_enum(self, obj):
    if isinstance(obj, Enum):
        return obj.name
    if isinstance(obj, datetime.time):
        return str(obj)
    return _json_default(self, obj)


json.JSONEncoder.default = _encode_enum


class CarpetBoost(Enum):
    Unknown = -1
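
With the patch in place, any JSONEncoder (including plain json.dumps) can serialize the Enum and datetime.time values the Roomba API returns; a quick sanity check under that assumption:

print(json.dumps({"boost": CarpetBoost.Unknown,
                  "start": datetime.time(9, 30)}))
# -> {"boost": "Unknown", "start": "09:30:00"}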
Example 15
    from urllib.parse import quote as _quote
    from urllib.parse import unquote as _unquote
    from urllib.parse import urlunsplit, urlsplit

    string_type = str
    bytes_type = bytes

else:
    # Python 2 fallback: quote/unquote live in urllib and the split
    # helpers in urlparse.
    from urllib import quote as _quote
    from urllib import unquote as _unquote
    from urlparse import urlunsplit, urlsplit

    string_type = str
    bytes_type = str

json_encoder = json.JSONEncoder()


def _extract_credentials(url):
    """
    Extract authentication (user name and password) credentials from the
    given URL.

    >>> _extract_credentials('http://*****:*****@localhost:5984/_config/')
    ('http://localhost:5984/_config/', ('*****', '*****'))
    >>> _extract_credentials('http://*****:*****@'
    ...                      'localhost:5984/_config/')
    ('http://localhost:5984/_config/', ('*****@example.com', 'secret'))
    """
Example 16
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import json
import pytest  # type: ignore
import six

from cmk.gui.utils.html import HTML


# Monkey patch in order to make the HTML class below json-serializable without changing the default json calls.
def _default(self, obj):
    return getattr(obj.__class__, "to_json", _default.default)(obj)


_default.default = json.JSONEncoder().default  # Save unmodified default.
json.JSONEncoder.default = _default  # replacement


@pytest.mark.parametrize("value", [
    None,
    "",
    123,
    123.4,
    "one",
    "Oneüლ,ᔑ•ﺪ͟͠•ᔐ.ლ",
])
def test_class_HTML_value(value):
    assert isinstance(HTML(value).value, six.text_type)
    assert HTML(HTML(value)) == HTML(value)
Example 17
parser.add_argument('--file', help='The file to open', required=True)
parser.add_argument('--line', help='The new line added to the file', required=True)

args = parser.parse_args()

args.line = args.line.strip()

line_exists = 0

try:
	file = open(args.file, 'r')
except IOError:
	print(json.JSONEncoder().encode([0, 'CANNOT_OPEN_FILE_FOR_READ']))
	exit(1)

for line in file:
	line = line.strip()

	if line == args.line:
		line_exists = 1

file.close()

if line_exists == 0:
	try:
		file = open(args.file, 'a')
Example 18
    the underlying dict
    """
    if type(obj) is frozendict:
        # fishing the protected dict out of the object is a bit nasty,
        # but we don't really want the overhead of copying the dict.
        return obj._dict
    raise TypeError("Object of type %s is not JSON serializable" %
                    obj.__class__.__name__)


# A custom JSON encoder which:
#   * handles frozendicts
#   * produces valid JSON (no NaNs etc)
#   * reduces redundant whitespace
json_encoder = json.JSONEncoder(allow_nan=False,
                                separators=(",", ":"),
                                default=_handle_frozendict)
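
A minimal sketch of this encoder in action, using a hypothetical stand-in for the real frozendict package that Synapse depends on:

class frozendict:  # hypothetical stand-in for the frozendict package
    def __init__(self, d):
        self._dict = dict(d)

print(json_encoder.encode({"m": frozendict({"a": 1})}))  # -> {"m":{"a":1}}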

# Create a custom decoder to reject Python extensions to JSON.
json_decoder = json.JSONDecoder(parse_constant=_reject_invalid_json)


def unwrapFirstError(failure):
    # defer.gatherResults and DeferredLists wrap failures.
    failure.trap(defer.FirstError)
    return failure.value.subFailure


@attr.s(slots=True)
class Clock:
    """
Example 19
    def my_secure_insert(self, **kwargs):
        
        values = {}
        for field_name, field_value in kwargs.items():
            values[field_name] = field_value

        if values['my_pie'] != "3.14":
            return "You touched my pie!!!"

        #Check if this form still exists
        if http.request.env['html.form'].sudo().search_count([('id','=',int(values['form_id']) )]) == 0:
            return "The form no longer exists"

        entity_form = http.request.env['html.form'].sudo().browse(int(values['form_id']))
                        
        ref_url = ""
        if 'Referer' in http.request.httprequest.headers:
            ref_url = http.request.httprequest.headers['Referer']
                       
        #Captcha Check
        if entity_form.captcha:
               
            #Redirect them back if they didn't answer the captcha
            if 'g-recaptcha-response' not in values:
                return werkzeug.utils.redirect(ref_url)         
            
            payload = {'secret': str(entity_form.captcha_secret_key), 'response': str(values['g-recaptcha-response'])}
            response_json = requests.post("https://www.google.com/recaptcha/api/siteverify", data=payload)

            if not response_json.json()['success']:
                return werkzeug.utils.redirect(ref_url)
        
        secure_values = {}
        history_values = {}
        return_errors = []
        insert_data_dict = []
        form_error = False
        
        #populate an array which has ONLY the fields that are in the form (prevent injection)
        for fi in entity_form.fields_ids:
            # Required field check
            if fi.setting_general_required and fi.html_name not in values:
                return_item = {"html_name": fi.html_name, "error_message": "This field is required"}
                return_errors.append(return_item)
                form_error = True
        
            if fi.html_name in values:
                method = '_process_html_%s' % (fi.field_type.html_type,)
                action = getattr(self, method, None)

                if not action:
                    raise NotImplementedError('Method %r is not implemented on %r object.' % (method, self))

                field_valid = html_field_response()

                field_valid = action(fi, values[fi.html_name])

                if field_valid.error == "":
                    secure_values[fi.field_id.name] = field_valid.return_data
                    insert_data_dict.append({'field_id': fi.field_id.id, 'insert_value': field_valid.history_data})
                else:
                    return_item = {"html_name": fi.html_name, "error_message": field_valid.error}
                    return_errors.append(return_item)
                    form_error = True
                    
        if form_error:
            return json.JSONEncoder().encode({'status': 'error', 'errors':return_errors})
        else:
            new_history = http.request.env['html.form.history'].sudo().create({'ref_url':ref_url, 'html_id': entity_form.id})
            
            for insert_field in insert_data_dict:
                 new_history.insert_data.sudo().create({'html_id': new_history.id, 'field_id': insert_field['field_id'], 'insert_value': insert_field['insert_value'] })
            
            #default values
            for df in entity_form.defaults_values:
                if df.field_id.ttype == "many2many":
                    secure_values[df.field_id.name] = [(4, request.env[df.field_id.relation].search([('name','=',df.default_value)])[0].id )]
                else:
                    secure_values[df.field_id.name] = df.default_value
                
                new_history.insert_data.sudo().create({'html_id': new_history.id, 'field_id':df.field_id.id, 'insert_value':df.default_value})
        
            try:
                new_record = http.request.env[entity_form.model_id.model].sudo().create(secure_values)
            except Exception as e:
                return "Failed to insert record<br/>\n" + str(e)
                
            new_history.record_id = new_record.id
 
 
            #Execute all the server actions
            for sa in entity_form.submit_action:

                method = '_html_action_%s' % (sa.setting_name,)
                action = getattr(self, method, None)

                if not action:
                    raise NotImplementedError('Method %r is not implemented on %r object.' % (method, self))

                # Call the submit action, passing the action settings and the history object
                action(sa, new_history)
 
            if 'is_ajax_post' in values:
                return json.JSONEncoder().encode({'status': 'success', 'redirect_url':entity_form.return_url})
            else:
                return werkzeug.utils.redirect(entity_form.return_url)
Example 20
def publish_encoded_image(mqttc, topic, encoded):
    data = {"data": encoded, "time": timestamp()}
    mqttc.publish(topic, json.JSONEncoder().encode(data))
Example 21
def run_jsonhook(hook, spec, res, dsarg=None):
    """Execute a hook on a given result

    A hook definition's 'call' specification may contain placeholders that
    will be expanded using matching values in the given result record. In
    addition to keys in the result a '{dsarg}' placeholder is supported.
    The characters '{' and '}' in the 'call' specification that are not part
    of format() placeholders have to be escaped as '{{' and '}}'. Example
    'call' specification to execute the DataLad ``unlock`` command::

        unlock {{"dataset": "{dsarg}", "path": "{path}"}}

    Parameters
    ----------
    hook : str
      Name of the hook
    spec : dict
      Hook definition as returned by `get_hooks_from_config()`
    res : dict
      Result records that were found to match the hook definition.
    dsarg : Dataset or str or None, optional
      Value to substitute a {dsarg} placeholder in a hook 'call' specification
      with. Non-string values are automatically converted.

    Yields
    ------
    dict
      Any result yielded by the command executed as hook.
    """
    import datalad.api as dl
    cmd_name = spec['cmd']
    if not hasattr(dl, cmd_name):
        # TODO maybe a proper error result?
        lgr.warning(
            'Hook %s requires unknown command %s, skipped',
            hook, cmd_name)
        return
    cmd = getattr(dl, cmd_name)
    # apply potential substitutions on the string form of the args
    # for this particular result
    # take care of proper JSON encoding for each value
    enc = json.JSONEncoder().encode
    # we have to ensure JSON encoding of all values (some might be Path instances),
    # we are taking off the outer quoting, to enable flexible combination
    # of individual items in supplied command and argument templates
    args = spec['args'].format(
        # we cannot use a dataset instance directly but must take the
        # detour over the path location in order to have string substitution
        # be possible
        dsarg='' if dsarg is None else enc(dsarg.path).strip('"')
        if isinstance(dsarg, dl.Dataset) else enc(dsarg).strip('"'),
        # skip any present logger that we only carry for internal purposes
        **{k: enc(str(v)).strip('"') for k, v in res.items() if k != 'logger'})
    # now load
    try:
        args = json.loads(args)
    except Exception as e:
        from datalad.dochelpers import exc_str
        lgr.warning(
            'Invalid argument specification for hook %s '
            '(after parameter substitutions): %s [%s], '
            'hook will be skipped',
            hook, args, exc_str(e))
        return
    # only debug level, the hook can issue its own results and communicate
    # through them
    lgr.debug('Running hook %s: %s%s', hook, cmd_name, args)
    for r in cmd(**args):
        yield r
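
The enc(...).strip('"') dance above is worth a tiny illustration: encoding a value yields a quoted JSON literal, and stripping the outer quotes makes it safe to splice into the 'args' template:

enc = json.JSONEncoder().encode
print(enc('/data/ds'))             # "/data/ds"  (a quoted JSON string)
print(enc('/data/ds').strip('"'))  # /data/ds    (ready for template splicing)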
Example 22
shape = arr.shape
imageList = arr.flatten().tolist()

#location = 'localhost'
location = '192.168.1.82'
port = 80
serveradd = 'http://' + location + ':' + str(
    port) + '/cgi-bin/calculate_offload.py'

payload = {
    'key1': [[1, 2, 3], [2, 3, 4]],
    'key2': 200,
    'image': imageList,
    'shape': shape
}
mydata = json.JSONEncoder().encode(payload)

for i in range(10):
    t1 = datetime.datetime.now()
    r = requests.post(serveradd, data=mydata)
    backData = json.loads(r.text)
    backImage = backData['image']
    timeNow = backData['key1']
    newList = backData['key4']
    #print(r.text)
    t2 = datetime.datetime.now()
    tdif = t2 - t1
    print(str(tdif.microseconds / 1e6) + ' seconds')

print(timeNow)
print(newList)
Example 23
def create_vl(req_param):
    ret = req_by_msb("/openoapi/resmgr/v1/vl", "POST", json.JSONEncoder().encode(req_param))
    if ret[0] != 0:
        logger.error("Failed to create vl to resmgr. detail is %s.", ret[1])
Example 24
    def saveWeightInDB(self):
        beneficiary_id = self.b_id
        datecaptured = ""
        weight = ""
        result = {}
        allow_insert = 1
        # Get data from fields
        try:

            weight = self.myjson["Weight"]
            datecaptured = self.myjson["DateCaptured"]

        except Exception:
            #print "Content-type: text/html\n"
            result["message"] = 'There was an error in processing a JSON object'
            return json.JSONEncoder().encode(result)
            #sys.exit()

        if (weight == "None") or (datecaptured == "None"):
            #print "Content-type: text/html\n"
            result["message"] = "Error: Some fields are missing. Please fill in both weight and date"
            return json.JSONEncoder().encode(result)
            #sys.exit()

        try:
            #engine=create_engine('mysql://*****:*****@localhost/wellness', echo=False)
            engine = db
            # create a Session
            Session = sessionmaker(bind=engine)
            session = Session()

            # querying for a record in the physical_activity pattern table
            res = session.query(Weight).filter(
                Weight.beneficiary_id == beneficiary_id).filter(
                    Weight.datecaptured == datecaptured).first()
            if res is None:
                pass
            else:
                weightrecord = res
                previousweight = weightrecord.weight
                weightrecord.weight = weight

                allow_insert = 0
                #size=size-1 #ignore the last value because it has arleady been updated
                session.commit()
                result["message"] = (
                    "The weight for this date existed and it was updated "
                    "from %s kg to %s kg" % (previousweight, weight))

        except Exception as e:
            session.close()
            engine.dispose()
            dbconn.close()
            #print "Content-type: text/html\n"

            result["message"] = "Error: %s" % e
            #print
            return (json.JSONEncoder().encode(result))
            #sys.exit()

        if allow_insert == 1:
            try:
                #print "Content-Type: text/html\n"
                #engine=db
                # create a Session
                #Session = sessionmaker(bind=engine)

                #session = Session()

                # Create weight
                #new_food=FoodAndBeverage('KTLNTW00',datetime.date(1988,12,01))
                new_weight = Weight(beneficiary_id, weight, datecaptured)

                session.add(new_weight)

                # commit the record the database

                session.commit()
                result["message"] = "The weight was recorded successfully"

            except Exception as e:
                result["message"] = str(e)  # keep the result JSON-serializable
                session.close()
                engine.dispose()
                dbconn.close()
                return (json.JSONEncoder().encode(result))

        session.close()
        engine.dispose()
        dbconn.close()

        return (json.JSONEncoder().encode(result))
Example 25
from http.server import *
import socketserver
import socket
import time
import json
import sqlite3

JD = json.JSONDecoder()
JE = json.JSONEncoder()
events = dict()

mbeds = list()
mbed_eids = dict()  ##mbed event ids
mbed_queues = dict()  ##mbed events to be queued


def clear_queue(mbed_id):
    # Serialize the queued (name, value) pairs by hand; values are emitted
    # unquoted on purpose, and an empty queue now yields "{}".
    tuple_list = mbed_queues[mbed_id]
    parts = ['"%s":%s' % (name, val) for name, val in tuple_list]
    mbed_queues[mbed_id] = list()
    return "{" + ",".join(parts) + "}"
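
For example, with a couple of queued conditions (values are emitted unquoted by design):

mbed_queues["m1"] = [("create", 7), ("end", 1)]
print(clear_queue("m1"))  # -> {"create":7,"end":1}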


def add_event_to_queue(mbed_id, conditions, event_id):
    event = [
        ("create", event_id),
    ] + conditions + [("end", 1)]
Example 26
def saveCache(postCache):
    with open(CACHE_FILE, "w") as fout:
        for chunk in json.JSONEncoder().iterencode(postCache):
            fout.write(chunk)
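
iterencode() is the point here: it streams the JSON to the file in chunks instead of materializing one big string first, which matters for large caches. The same idea against an in-memory buffer:

import io
import json

buf = io.StringIO()
for chunk in json.JSONEncoder().iterencode({"posts": list(range(3))}):
    buf.write(chunk)
print(buf.getvalue())  # -> {"posts": [0, 1, 2]}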
Example 27
from django import forms
from django.conf import settings
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.utils.html import conditional_escape
from django.utils.encoding import force_text
from django.utils.translation import get_language
from django.core.exceptions import ImproperlyConfigured
from django.forms.util import flatatt
import json


json_encode = json.JSONEncoder().encode

DEFAULT_CONFIG = {
    'skin': 'moono',
    'toolbar_Basic': [
        ['Source', '-', 'Bold', 'Italic']
    ],
    'toolbar_Full': [
        ['liststyle', 'Styles', 'Format', 'Bold', 'Italic', 'Underline', 'Strike', 'SpellChecker', 'Undo', 'Redo'],
        ['Image', 'Flash', 'Table', 'HorizontalRule'],
        ['TextColor', 'BGColor'],
        ['Smiley', 'SpecialChar'], ['Source'],
    ],
    'toolbar': 'Full',
    'height': 291,
    'width': 835,
    'filebrowserWindowWidth': 940,
    'filebrowserWindowHeight': 725,
Example 28
import json

print("start")
# Encode once: iterencode() streams the JSON out in chunks instead of
# building the whole string in memory first.
bigobject = {"foo": ["bar", "baz"]}

for chunk in json.JSONEncoder().iterencode(bigobject):
    print(chunk)
print("end")
Example 29
def update_log(key, value):
    if key != "":
        global log
        a = json.loads(log)
        a[key] = str(value)
        log = json.JSONEncoder().encode(a)
Example 30
def datetime_handler(obj):
    "Separate handler to handle datetimes -> JSON"
    return (obj.isoformat() if isinstance(obj, (datetime, date)) else
            json.JSONEncoder().default(obj))
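
This handler is meant to be passed as the default= hook, so unknown types still raise the encoder's usual TypeError; a quick usage sketch:

import json
from datetime import date, datetime

print(json.dumps({"when": datetime(2020, 1, 2, 3, 4, 5)},
                 default=datetime_handler))
# -> {"when": "2020-01-02T03:04:05"}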