Example #1
    def confirm_account(self, confirmation_token):
        '''
        Confirms user in database
        '''
        try:
            # Decode token
            jwt_manager = JWTConfirmEmailManagerFactory.new()
            brl_user = jwt_manager.get_confirmed_user(confirmation_token)
            user = self.store.read_user(brl_user)
        except NotInStoreException:
            raise NotFoundException("User '%s' doesn't exist" % brl_user)
        if user.confirmation_token == confirmation_token:
            if not user.active:  # Do not re-send things if already activated
                try:
                    register_signup(self.store, brl_user)
                except Exception as exc:
                    logger.error("Can't register sign-up in background! %s" % str(exc))

            user.active = True
            user.confirmation_date = datetime.datetime.now()
            self.store.update_user(user)
            jwt_auth_manager = JWTCredentialsManagerFactory.new(self.store)
            token = jwt_auth_manager.get_token_for(brl_user)

            return token, brl_user, user.ga_client_id

        else:
            raise NotFoundException("Invalid user or token")
Example #2
    def update(self, value, collection, upsert=False, is_serialized=False):
        try:
            if is_serialized:
                serial = value
            else:
                serial = value.serialize()
            query = {'_id': serial['_id']}
            txn_k = update_if_current.SERIAL_TXN_COUNTER_KEY
            trx_record = hasattr(value, txn_k)  # Requires update_if_current check

            if trx_record:
                # Query updated
                query[txn_k] = getattr(value, txn_k)
                # Update in DB the counter if its a clean object
                serial[txn_k] = update_if_current.inc_txn_counter(query[txn_k])
                # Update object in memory with new counter
                # (otherwise 2 updates for same object will fail)
                setattr(value, txn_k, serial[txn_k])

            serial.pop('_id')
            self._update_collection(collection, query, {"$set": serial}, upsert, trx_record)

        except Exception as e:
            tb = traceback.format_exc()
            logger.error(e)
            logger.error(tb)
            raise e
Example #3
    def get_block_info(self, brl_block):
        '''Check if auth_user can publish a block version specified by parameter block_version
         Returns:
            BlockInfo
         '''

        try:
            self.security.check_read_block(brl_block)
        except NotInStoreException:
            # In this case, the block doesn't exist, but return information of -1 and permissions
            return self._get_new_block_info(brl_block)

        block_info = BlockInfo()
        try:
            self.security.check_write_block(brl_block)
            block_info.can_write = True
        except ForbiddenException:
            block_info.can_write = False

        try:
            block = self._store.read_block(brl_block)
            block_info.last_version = block.last_version()
            block_info.private = self.security.is_private(brl_block)
        except Exception as e:
            tb = traceback.format_exc()
            logger.debug(tb)
            logger.error("Something went wrong with %s" % e)
            raise BiiServiceException('Something went wrong')

        return block_info
Example #4
    def upload(self, firmware):
        '''Uploading the firmware to Arduino'''
        self.bii.user_io.out.write('Uploading...')
        build_command = 'make' if sys.platform != 'win32' else 'mingw32-make'
        if platform.system() == 'Linux':
            build_command = " sudo %s" % build_command
        build_command = "%s %s-upload" % (build_command, firmware)
        # This is needed for Arduino Leonardo boards
        # see:http://nicholaskell.wordpress.com/2012/08/11/arduino-leonardo-upload-from-makefile/
        arduino_settings = self.settings
        if arduino_settings.board == "leonardo":
            import serial
            import time
            ser = serial.Serial(
                port=arduino_settings.port,
                baudrate=1200,
                parity=serial.PARITY_NONE,
                stopbits=serial.STOPBITS_ONE,
                bytesize=serial.EIGHTBITS
            )
            while not ser.isOpen():
                pass
            ser.close()
            time.sleep(2)

        hive_paths = HivePaths(self.bii.current_folder)
        retcode, out = execute(build_command, self.bii.user_io, cwd=hive_paths.build)
        errors = out.strip().split(os.linesep)
        if retcode != 0 or 'Error' in errors[-1]:
            logger.error(out)
            raise BiiException('Upload failed')
        return True
Example #5
def _get_action_label(action_name, kwargs):
    """For use a third dimension in Ga"""
    try:
        block_version_from_kwargs = lambda kwargs: str(BlockVersion(
                                                       BRLBlock("%s/%s/%s" % (kwargs["owner_name"],
                                                                kwargs["block_name"],
                                                                kwargs["branch_name"])),
                                                                kwargs["version"]))
        if action_name == "get_block_info":
            return str(BRLBlock("%s/%s/%s" % (kwargs["owner_name"],
                                              kwargs["block_name"],
                                              kwargs["branch_name"])))
        elif action_name == "get_version_delta_info":
            return block_version_from_kwargs(kwargs)
        elif action_name == "get_cells_snapshot":
            return str(BlockVersion.deserialize(kwargs["bson_data"]["data"]))
        elif action_name == "get_dep_table":
            return block_version_from_kwargs(kwargs)
        elif action_name == "get_published_resources":
            return ",".join([str(BlockVersion.deserialize(elem[0]))
                             for elem in kwargs["bson_data"]["data"]])
        elif action_name == "publish":
            return str(BlockVersion.deserialize(kwargs["bson_data"]["data"]
                                                [PublishRequest.SERIAL_TRACKED_KEY]))
        elif action_name == "get_renames":
            return ", ".join([str(BlockVersion.deserialize(elem))
                             for elem in kwargs["bson_data"]["data"]])
        else:
            return ""
    except Exception as e:
        logger.error("Error getting label for GA in bii_user_trace %s" % str(e))
        return ""
Example #6
    def upload(self, firmware):
        '''Uploading the firmware to Arduino'''
        self.bii.user_io.out.write('Uploading...')
        build_command = 'make' if sys.platform != 'win32' else 'mingw32-make'
        if platform.system() == 'Linux':
            build_command = " sudo %s" % build_command
        build_command = "%s %s-upload" % (build_command, firmware)
        # This is needed for Arduino Leonardo boards
        # see:http://nicholaskell.wordpress.com/2012/08/11/arduino-leonardo-upload-from-makefile/
        arduino_settings = self.settings
        if arduino_settings.board == "leonardo":
            import serial
            import time
            ser = serial.Serial(port=arduino_settings.port,
                                baudrate=1200,
                                parity=serial.PARITY_NONE,
                                stopbits=serial.STOPBITS_ONE,
                                bytesize=serial.EIGHTBITS)
            while not ser.isOpen():
                pass
            ser.close()
            time.sleep(2)

        hive_paths = HivePaths(self.bii.current_folder)
        retcode, out = execute(build_command,
                               self.bii.user_io,
                               cwd=hive_paths.build)
        errors = out.strip().split(os.linesep)
        if retcode != 0 or 'Error' in errors[-1]:
            logger.error(out)
            raise BiiException('Upload failed')
        return True
Example #7
 def __setitem__(self, old_name, new_name):
     assert isinstance(old_name, CellName)
     assert isinstance(new_name, CellName)
     if old_name == new_name:
         logger.error('Rename with the same name %s' % old_name)
         return
     super(Renames, self).__setitem__(old_name, new_name)
Example #8
 def _compute_new(self, block_name, decls, policy, existing_block_names,
                  biiresponse):
     try:
         biiresponse.info("Looking for %s..." % block_name)
         # branches = self._store.read_tracks(block_name)
         # branches.get_blocks()
         block_candidates = [
             block_name + BranchName("%s/master" % block_name.user)
         ]
         block_candidates = policy.filter(block_candidates)
         delta_versions = self._filter_by_policy(block_candidates, policy,
                                                 biiresponse)
         logger.debug("The heap is %s" % delta_versions)
         result = self._define_hypothesis(delta_versions, decls,
                                          existing_block_names, biiresponse)
         return result
     except ForbiddenException:  # Propagate forbidden to client
         raise
     except NotInStoreException:
         biiresponse.warn("Can't find block candidate for: %s" %
                          (str(block_name)))
         return []
     except Exception:
         biiresponse.error("Fatal error in server while reading %s" %
                           block_name)
         logger.error(traceback.format_exc())
         return []
Example #9
 def __setitem__(self, old_name, new_name):
     assert isinstance(old_name, CellName)
     assert isinstance(new_name, CellName)
     if old_name == new_name:
         logger.error('Rename with the same name %s' % old_name)
         return
     super(Renames, self).__setitem__(old_name, new_name)
Example #10
    def _relative_match(self, block_cell_names, from_block_cell_name):
        ''' When the import is relative, the approach followed is to
        look for files within the same block that match the import.
        '''
        python_import = self.python_import()
        result = set()
        if from_block_cell_name:
            try:
                block_name = from_block_cell_name.block_name
                normalized_import = normalized_name(python_import.module)

                for block_cell_name in [bcn for bcn in block_cell_names
                                        if bcn.block_name == block_name
                                        and bcn.endswith(normalized_import)]:
                    tail = os.path.split(block_cell_name)[1]
                    if len(normalized_import) >= len(tail):
                        # To avoid match like test.py pretest.py
                        result.add(block_cell_name)
                        self.collect_init_dependencies(block_cell_name, block_cell_names, result)

                return result
            except Exception as e:
                logger.error("Approximate find failed %s" % str(e))
                pass
        return set()
Example #11
    def _relative_match(self, block_cell_names, from_block_cell_name):
        ''' When the import is relative, the approach followed is to
        look for files within the same block that match the import.
        '''
        python_import = self.python_import()
        result = set()
        if from_block_cell_name:
            try:
                block_name = from_block_cell_name.block_name
                normalized_import = normalized_name(python_import.module)

                for block_cell_name in [
                        bcn for bcn in block_cell_names
                        if bcn.block_name == block_name
                        and bcn.endswith(normalized_import)
                ]:
                    tail = os.path.split(block_cell_name)[1]
                    if len(normalized_import) >= len(tail):
                        # To avoid match like test.py pretest.py
                        result.add(block_cell_name)
                        self.collect_init_dependencies(block_cell_name,
                                                       block_cell_names,
                                                       result)

                return result
            except Exception as e:
                logger.error("Approximate find failed %s" % str(e))
                pass
        return set()
Example #12
    def get_block_info(self, brl_block):
        '''Check if auth_user can publish a block version specified by parameter block_version
         Returns:
            BlockInfo
         '''

        try:
            self.security.check_read_block(brl_block)
        except NotInStoreException:
            # In this case, the block doesn't exist, but return information of -1 and permissions
            return self._get_new_block_info(brl_block)

        block_info = BlockInfo()
        try:
            self.security.check_write_block(brl_block)
            block_info.can_write = True
        except ForbiddenException:
            block_info.can_write = False

        try:
            block = self._store.read_block(brl_block)
            block_info.last_version = block.last_version()
            block_info.private = self.security.is_private(brl_block)
        except Exception as e:
            tb = traceback.format_exc()
            logger.debug(tb)
            logger.error("Something went wrong with %s" % e)
            raise BiiServiceException('Something went wrong')

        return block_info
Example #13
    def update(self, value, collection, upsert=False, is_serialized=False):
        try:
            if is_serialized:
                serial = value
            else:
                serial = value.serialize()
            query = {'_id': serial['_id']}
            txn_k = update_if_current.SERIAL_TXN_COUNTER_KEY
            trx_record = hasattr(value,
                                 txn_k)  # Requires update_if_current check

            if trx_record:
                # Query updated
                query[txn_k] = getattr(value, txn_k)
                # Update in DB the counter if its a clean object
                serial[txn_k] = update_if_current.inc_txn_counter(query[txn_k])
                # Update object in memory with new counter
                # (otherwise 2 updates for same object will fail)
                setattr(value, txn_k, serial[txn_k])

            serial.pop('_id')
            self._update_collection(collection, query, {"$set": serial},
                                    upsert, trx_record)

        except Exception as e:
            tb = traceback.format_exc()
            logger.error(e)
            logger.error(tb)
            raise e
Example #14
    def set(self, key, value, expire_seconds=0):
        key = self._construct_key(key)
        try:
            ret = self.mc.set(key, value, time=int(expire_seconds))
        except Exception as exc:
            logger.error(exc)
            return None

        return ret
Example #15
    def set(self, key, value, expire_seconds=0):
        key = self._construct_key(key)
        try:
            ret = self.mc.set(key, value, time=int(expire_seconds))
        except Exception as exc:
            logger.error(exc)
            return None

        return ret
Example #16
def init_hive(bii, project_name=None, layout=None):
    """ Initializes an empty project
    """
    user_cache = bii.user_cache
    out = bii.user_io.out

    bii_paths = bii.bii_paths
    if bii_paths.current_dir.startswith(bii_paths.user_bii_home):
        raise BiiException(
            'Cannot create a project inside the user .biicode folder')

    try:
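        # Accessing project_root raises NotInAHiveException when we are not inside an existing project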
        bii_paths.project_root
        raise ClientException('Cannot create project inside other project')
    except NotInAHiveException:
        pass

    if project_name:
        name = ComplexName(project_name)
        current_dir = os.path.join(bii_paths.current_dir, name)
        bii_paths.current_dir = current_dir
    else:
        current_dir = bii_paths.current_dir
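        # ComplexName() is called only for validation; it presumably raises on an invalid folder name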
        ComplexName(os.path.basename(current_dir))

    for root, _, _ in os.walk(current_dir):
        if os.path.exists(os.path.join(root, BII_DIR, BII_HIVE_DB)):
            if root == current_dir:
                project_name = os.path.basename(current_dir)
                raise ClientException('Project "%s" already exists' %
                                      project_name)
            raise ClientException(
                'Cannot create project with other project inside:\n%s' % root)

    hive_disk_image = bii.hive_disk_image
    hive_disk_image.initialize()

    try:
        hive = Hive()
        hive_disk_image.hivedb.upsert_hive(hive)
        out.success('Successfully initialized biicode project %s' %
                    (project_name or ""))
    # If an exception is launched, the hive folder is deleted
    except BaseException as e:
        out.error('An error occurred while creating the project %s' % str(e))
        logger.error(traceback.format_exc())
        if project_name and os.path.exists(current_dir):
            hive_disk_image.hivedb.disconnect()
            shutil.rmtree(current_dir)
    else:
        layout_content = user_cache.layout(layout)
        if layout_content:
            save(os.path.join(hive_disk_image.paths.bii, "layout.bii"),
                 layout_content)
Example #17
 def add_reverse_dependency_to(self, block_version, new_reverse_dep):
     block_version = LengthySerializedBlockVersion(block_version.block, block_version.time)
     new_reverse_dep = LengthySerializedBlockVersion(new_reverse_dep.block,
                                                     new_reverse_dep.time)
     dbcol = self.mongo_server_store.db[self.REVERSE_DEPENDENCIES_ST]
     try:
         tmp = {ReverseDependency.SERIAL_DEPS_ON_KEY: new_reverse_dep.serialize()}
         dbcol.update(block_version.serialize(), {"$addToSet": tmp})
     except Exception as e:
         logger.error(e)
         raise e
Example #18
 def _begin_transaction(self, collection, brl, backup_content):
     try:
         dbcol = self.db[collection]
         transaction_definition = {'state': 'pending'}
         transaction_definition.update(backup_content)
         dbcol.update({'_id': brl.serialize(), 'state': 'initial'},
                      {"$set": transaction_definition})
     except OperationFailure as e:
         tb = traceback.format_exc()
         logger.error(tb)
         raise MongoStoreException(e)
Example #19
def get_message_and_code_from_exception(exc):
    code = getHttpCodeFromException(exc)
    if code == 500:
        logger.error("%s" % str(exc))
        logger.error("%s" % traceback.format_exc())
        msg = ("An unexpected error has occurred in Bii service and has "
               "been reported. We hope to fix it as soon as possible")
        return msg, 500
    else:
        logger.info("Return code %s: %s" % (str(code), str(exc)))
        return exc.message, code
Example #20
def get_message_and_code_from_exception(exc):
    code = getHttpCodeFromException(exc)
    if code == 500:
        logger.error("%s" % str(exc))
        logger.error("%s" % traceback.format_exc())
        msg = ("An unexpected error has occurred in Bii service and has "
               "been reported. We hope to fix it as soon as possible")
        return msg, 500
    else:
        logger.info("Return code %s: %s" % (str(code), str(exc)))
        return exc.message, code
Example #21
    def solveCSP(self):
        '''search driver: returns TRUE if joint compatibility is found'''
        self.__initSearch()
        if self.__root_hyp:
            self.preproc()
        self.__isSolFound = self.__expand(0)

        # Basic check of solution length
        if self.__isSolFound:
            if not self.__solSet or (len(self.__solSet[0]) != self.__nVAR):
                logger.error('Error in CSPExact: incorrect solution')
        return self.__isSolFound
Example #22
    def _generate_project(self, toolchain_file, parameters):
        '''runs CMake to generate Makefiles or Project'''
        # Obtain generator
        settings = self.hive_disk_image.settings
        generator = settings.cmake.generator

        # Define toolchain if necessary, for arduino or cross building
        toolchain = '' if not toolchain_file else '-DCMAKE_TOOLCHAIN_FILE=%s' % toolchain_file

        # Define command to run
        parameters = ' '.join(parameters)
        cmake_rel_path = os.path.relpath(self.bii_paths.cmake,
                                         self.bii_paths.build)
        command = ('"%s" %s -G "%s" -Wno-dev %s %s' % (cmake_command(
            self.bii_paths), toolchain, generator, parameters, cmake_rel_path))
        self.user_io.out.write('Running: %s\n' % command)

        if 'NMake' in generator:
            # VS specific: it is necessary to call vcvarsall
            self.user_io.out.warn(
                'NMake generator must run in a shell with compiler defined.\n'
                'It might not work if not')
            command = command_with_vcvars(generator, self.bii_paths.build,
                                          command)

        retcode, cmake_output = execute(command,
                                        self.user_io,
                                        cwd=self.bii_paths.build)
        if 'Does not match the generator used previously' in cmake_output:
            try:
                self.user_io.out.warn(
                    'Previous generator does not match. Deleting build folder '
                    'and trying again')
                self.hive_disk_image.delete_build_folder()
            except Exception as e:
                self.user_io.out.warn('Could not complete deletion %s' %
                                      str(e))
            self.user_io.out.warn('Running cmake again')
            retcode, cmake_output = execute(command,
                                            self.user_io,
                                            cwd=self.bii_paths.build)
        if retcode != 0:
            logger.error(cmake_output)
            raise BiiException('CMake failed')

        if 'Eclipse' in self.hive_disk_image.settings.cmake.generator:
            ide = Eclipse(self.bii_paths)
            ide.configure_project()
            self.user_io.out.success(
                'Eclipse project in %s\n'
                'Open eclipse, select "File > Import > General > '
                'Existing project into Workspace" '
                'and select folder\n' % self.bii_paths.project_root)
Example #23
 def __init__(self, version=None, message='', last_compatible="0"):
     self.version = ClientVersion(version) if version else ClientVersion(__version__)
     self.last_compatible = ClientVersion(last_compatible)
     self.download_url = ''
     try:
         # There was a str(message) here, but that is wrong, fails in Pydev
         self.messages = message.encode('utf-8')
     except UnicodeEncodeError:
         tb = traceback.format_exc()
         logger.error(tb)
         logger.error('Invalid server info message')
         self.messages = message.encode('ascii', 'ignore')
Example #24
    def solveCSP(self):
        '''search driver: returns TRUE if joint compatibility is found'''
        self.__initSearch()
        if self.__root_hyp:
            self.preproc()
        self.__isSolFound = self.__expand(0)

        # Basic check of solution length
        if self.__isSolFound:
            if not self.__solSet or (len(self.__solSet[0]) != self.__nVAR):
                logger.error('Error in CSPExact: incorrect solution')
        return self.__isSolFound
Example #25
 def _begin_transaction(self, collection, brl, backup_content):
     try:
         dbcol = self.db[collection]
         transaction_definition = {'state': 'pending'}
         transaction_definition.update(backup_content)
         dbcol.update({
             '_id': brl.serialize(),
             'state': 'initial'
         }, {"$set": transaction_definition})
     except OperationFailure as e:
         tb = traceback.format_exc()
         logger.error(tb)
         raise MongoStoreException(e)
Example #26
 def handle_preprocessor(self, text):
     closer = {'"': '"', "'": "'"}
     tokenized_code = self.tokenize_code(text)
     if 'INCLUDE' == tokenized_code[0] or 'include' == tokenized_code[0]:
         try:
             m = self.start_require_pattern.search(text)
             start = m.start() + 1
             c = closer[m.group()]
             end = text.find(c, start + 1)
             return start, end, text[start:end].strip()
         except:
             logger.error('Unable to parse require %s ' % text)
     return (None, None, None)
Example #27
 def __init__(self, version=None, message='', last_compatible="0"):
     self.version = ClientVersion(version) if version else ClientVersion(
         __version__)
     self.last_compatible = ClientVersion(last_compatible)
     self.download_url = ''
     try:
         # There was a str(message) here, but that is wrong, fails in Pydev
         self.messages = message.encode('utf-8')
     except UnicodeEncodeError:
         tb = traceback.format_exc()
         logger.error(tb)
         logger.error('Invalid server info message')
         self.messages = message.encode('ascii', 'ignore')
Example #28
 def _check_banned(self):
     '''Check if the ip is banned'''
     ip_address = get_user_ip()
     info = self._read_info(ip_address)
     if self._is_banned(info) and not self._ban_expired(info):
         logger.error(" BANNED IP BLOCKED! " + str(ip_address)
                      + " Count: " + str(info.counter)
                      + " Time left: " + str(self._ban_time_left(info)) + " s.")
         raise self.banned_http_response
     elif self._is_banned(info) and self._ban_expired(info):
         info = _reset_info()
     logger.debug("IP: %s, Time: %s Count: %s" % (ip_address, info.time, info.counter))
     return info, ip_address
Example #29
 def handle_preprocessor(self, text):
     closer = {'"': '"',
               '\'': '\''}
     tokenized_code = self.tokenize_code(text)
     if 'require' == tokenized_code[0] or 'require' == tokenized_code[1]:  # TODO: Do not tokenize the full preprocessor directive
         try:
             m = self.start_require_pattern.search(text)
             start = m.start() + 1
             c = closer[m.group()]
             end = text.find(c, start + 1)
             return start, end, text[start:end].strip()
         except:
             logger.error('Unable to parse require %s ' % text)
     return (None, None, None)
Example #30
 def __initSearch(self):
     '''sets initial variable for the search;
        must be called after constructor'''
     if self.__nVAR == None or self.__csp == None:
         logger.error('invalid CSPExact')
         return -1        # possibly raise exception?
     else:
         self.__pathdict = {}
         self.__nSteps = 0
         self.__nPropagations = 0
         self.__depth_max = -1
         self.__isSolFound = False
         self.__solSet = []
         self.__history = []
Example #31
    def upsert_reverse_dependencies(self, reverse_dependency):
        '''Upsert the reverse dependencies of a BlockVersion.
        reverse_dependency is a ReverseDependency object'''

        try:
            # Query by all block version fields (compose key) (auto ID)
            serial = reverse_dependency.serialize()
            query = reverse_dependency.version.serialize()
            self.mongo_server_store._update_collection(
                self.REVERSE_DEPENDENCIES_ST, query, {"$set": serial},
                upsert=True, trx_record=None)

        except Exception as e:
            logger.error(e)
            raise e
Example #32
 def handle_preprocessor(self, text):
     closer = {'"': '"', '\'': '\''}
     tokenized_code = self.tokenize_code(text)
     if 'require' == tokenized_code[0] or 'require' == tokenized_code[1]:  # TODO: Do not tokenize the full preprocessor directive
         try:
             m = self.start_require_pattern.search(text)
             start = m.start() + 1
             c = closer[m.group()]
             end = text.find(c, start + 1)
             return start, end, text[start:end].strip()
         except:
             logger.error('Unable to parse require %s ' % text)
     return (None, None, None)
Example #33
 def handle_preprocessor(self, text):
     closer = {'"': '"',
               "'": "'"}
     tokenized_code = self.tokenize_code(text)
     if 'INCLUDE' == tokenized_code[0] or 'include' == tokenized_code[0]:
         try:
             m = self.start_require_pattern.search(text)
             start = m.start() + 1
             c = closer[m.group()]
             end = text.find(c, start + 1)
             return start, end, text[start:end].strip()
         except:
             logger.error('Unable to parse require %s ' % text)
     return (None, None, None)
Example #34
 def __initSearch(self):
     '''sets initial variable for the search;
        must be called after constructor'''
     if self.__nVAR == None or self.__csp == None:
         logger.error('invalid CSPExact')
         return -1  # possibly raise exception?
     else:
         self.__pathdict = {}
         self.__nSteps = 0
         self.__nPropagations = 0
         self.__depth_max = -1
         self.__isSolFound = False
         self.__solSet = []
         self.__history = []
Example #35
 def handle_preprocessor(self, text):
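     # Maps the opening delimiter of an include/import to its matching closing delimiter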
     closer = {'"': '"', '<': '>'}
     tokens = self.tokenize_code(text)
     if len(tokens) > 1 and tokens[1] in ('include', 'import'):
         # TODO: Do not tokenize the full preprocessor directive
         try:
             m = self.start_include_pattern.search(text)
             start = m.start() + 1
             c = closer[m.group()]
             end = text.find(c, start + 1)
             return start, end, text[start:end].strip()
         except:
             logger.error('Unable to parse include %s ' % text)
     return None, None, None
Example #36
 def handle_preprocessor(self, text):
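     # Maps the opening delimiter of an include/import to its matching closing delimiter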
     closer = {'"': '"', '<': '>'}
     tokens = self.tokenize_code(text)
     if len(tokens) > 1 and tokens[1] in ('include', 'import'):
         # TODO: Do not tokenize the full preprocessor directive
         try:
             m = self.start_include_pattern.search(text)
             start = m.start() + 1
             c = closer[m.group()]
             end = text.find(c, start + 1)
             return start, end, text[start:end].strip()
         except:
             logger.error('Unable to parse include %s ' % text)
     return None, None, None
Example #37
def init_hive(bii, project_name=None, layout=None):
    """ Initializes an empty project
    """
    user_cache = bii.user_cache
    out = bii.user_io.out

    bii_paths = bii.bii_paths
    if bii_paths.current_dir.startswith(bii_paths.user_bii_home):
        raise BiiException('Cannot create a project inside the user .biicode folder')

    try:
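        # Accessing project_root raises NotInAHiveException when we are not inside an existing project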
        bii_paths.project_root
        raise ClientException('Cannot create project inside other project')
    except NotInAHiveException:
        pass

    if project_name:
        name = ComplexName(project_name)
        current_dir = os.path.join(bii_paths.current_dir, name)
        bii_paths.current_dir = current_dir
    else:
        current_dir = bii_paths.current_dir
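        # ComplexName() is called only for validation; it presumably raises on an invalid folder name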
        ComplexName(os.path.basename(current_dir))

    for root, _, _ in os.walk(current_dir):
        if os.path.exists(os.path.join(root, BII_DIR, BII_HIVE_DB)):
            if root == current_dir:
                project_name = os.path.basename(current_dir)
                raise ClientException('Project "%s" already exists' % project_name)
            raise ClientException('Cannot create project with other project inside:\n%s' % root)

    hive_disk_image = bii.hive_disk_image
    hive_disk_image.initialize()

    try:
        hive = Hive()
        hive_disk_image.hivedb.upsert_hive(hive)
        out.success('Successfully initialized biicode project %s' % (project_name or ""))
    # If an exception is launched, the hive folder is deleted
    except BaseException as e:
        out.error('An error occurred while creating the project %s' % str(e))
        logger.error(traceback.format_exc())
        if project_name and os.path.exists(current_dir):
            hive_disk_image.hivedb.disconnect()
            shutil.rmtree(current_dir)
    else:
        layout_content = user_cache.layout(layout)
        if layout_content:
            save(os.path.join(hive_disk_image.paths.bii, "layout.bii"), layout_content)
Example #38
 def wrapped(*args, **kwargs):
     max_iterations = kwargs.pop("max_iterations", 100)  # Limit, then raise Exception
     max_uncouple_ms = kwargs.pop("max_uncouple_ms", 50)
     for i in range(max_iterations):
         try:
             logger.debug("Start try safe txn, try %s" % i)
             ret = fn(*args, **kwargs)
             logger.debug("Completed try safe txn %s" % ret)
             return ret
         except MongoNotCurrentObjectException as e:
             logger.error(str(e))
             wait_ms = choice(range(max_uncouple_ms))
             logger.debug("Waiting %s milliseconds..." % wait_ms)
             time.sleep(wait_ms / 1000.0)  # float division so we actually sleep milliseconds
             continue
Example #39
    def parse(self, biiresponse):
        if isinstance(self.cell, VirtualCell):
            biiresponse.error("You're trying to parse a virtual file: %s" % self.cell.name)
            return False

        try:
            if self.content.parser is None:
                self.content.parser = parser_factory(self.cell.type, self.cell.name.cell_name)
            if self.content.parser:
                self.content.parse()
                self.cell.hasMain = self.content.parser.has_main_function()
                self.cell.dependencies.update_declarations(self.content.parser.explicit_declarations)
        except Exception as e:
            logger.error(str(e))
            biiresponse.error("Error parsing %s file" % self.cell.name)
Example #40
 def wrapped(*args, **kwargs):
     max_iterations = kwargs.pop("max_iterations",
                                 100)  # Limit, then raise Exception
     max_uncouple_ms = kwargs.pop("max_uncouple_ms", 50)
     for i in range(max_iterations):
         try:
             logger.debug("Start try safe txn, try %s" % i)
             ret = fn(*args, **kwargs)
             logger.debug("Completed try safe txn %s" % ret)
             return ret
         except MongoNotCurrentObjectException as e:
             logger.error(str(e))
             wait_ms = choice(range(max_uncouple_ms))
             logger.debug("Waiting %s milliseconds..." % wait_ms)
             time.sleep(wait_ms / 1000.0)  # float division so we actually sleep milliseconds
             continue
Example #41
    def execute(self, argv):
        '''Executes given command
        @param argv: array containing command and its parameters
        '''

        #Obtain method, group and class
        try:
            if '--quiet' in argv:
                argv.remove('--quiet')
                self.bii.user_io.out.level = WARN
            elif '--verbose' in argv:
                argv.remove('--verbose')
                self.bii.user_io.out.level = DEBUG

            command = argv[0]
            if command == '--help' or command == '-h':
                self.catalog.print_help(self.bii.user_io.out, argv[1:])
                return
            elif command == '-v' or command == '--version':
                from biicode.common import __version__
                self.bii.user_io.out.write(str(__version__) + '\n')
                return

            method, _, class_ = self._get_method(command)
        except Exception as e:
            tb = traceback.format_exc()
            logger.debug(argv)
            raise ClientException(
                'None or bad command. Type "bii --help" for available commands'
            )

        #Obtain delegate object
        try:
            instance = class_(self.bii)
        except Exception as e:
            tb = traceback.format_exc()
            logger.error(e)
            logger.error(tb)
            raise BiiException('Internal error: %s tool cannot be created ' %
                               class_.__name__)

        #run bii:work if necessary to process local changes, except for the xxx:exe method
        #if '-h' not in argv and '--help' not in argv:
        #    self._migrate_hive(group)
        #Effective call
        self._migrate_hive()
        self._call_method(argv, method, instance)
Example #42
 def increment_event_counter(self, info, ip_address):
     '''Increments event counter and check for banned or reset by time'''
     try:
         if not self._count_expired(info):  # Not expired counter
             counter = info.counter + 1
             if counter >= self.max_events:
                 logger.error("BEGINS BANNED IP! " + ip_address)
                 now_time = time.time()  # Now begins banned time
                 self.__notify_ip_banned(ip_address, counter, now_time)
             else:
                 now_time = info.time
             info = BlockerInformationPair(counter, now_time)
         else:
             info = BlockerInformationPair(1, time.time())
         self._set_info(ip_address, info)
     except Exception as exc:
         logger.error("Error increment_event_counter from memcache: %s" % str(exc))
Example #43
 def _getBson(self):
     ''' If the ``Content-Type`` header is ``application/bson``, this
         property holds the parsed content of the request body. Only requests
         smaller than :attr:`MEMFILE_MAX` are processed to avoid memory
         exhaustion. '''
     max_size = BII_MAX_MEMORY_PER_REQUEST
     if request.headers['Content-Type'] == 'application/bson':
         if 0 < request.content_length < max_size:
             return decode_bson(request.body.read(max_size))
         else:
             logger.error("Max size of bson for request: %i" % request.content_length)
             # DO NOT REMOVE: BODY NEEDS TO BE READ BEFORE RAISE, IT SEEMS LIKE A BOTTLE BUG
             request.body.read(0)
             raise BSONBottlePluginException("Max request size overtaken")
     else:
         raise BSONBottlePluginException("Not Bson request in a method with bson_param specified")
     return None
Example #44
def factory(dbpath):
    try:
        if not os.path.exists(dbpath):
            folder = os.path.dirname(dbpath)
            if not os.path.exists(folder):
                os.makedirs(folder)
            db = HiveDB(dbpath)
            db.connect()
            # Init database with last migration, we are creating it with last version
            db.init(get_client_migrations().pop())
        else:
            db = HiveDB(dbpath)
            db.connect()
        return db
    except Exception as e:
        logger.error(e)
        raise ClientException("Could not initialize local cache", e)
Example #45
def factory(dbpath):
    try:
        if not os.path.exists(dbpath):
            folder = os.path.dirname(dbpath)
            if not os.path.exists(folder):
                os.makedirs(folder)
            db = HiveDB(dbpath)
            db.connect()
            # Init database with last migration, we are creating it with last version
            db.init(get_client_migrations().pop())
        else:
            db = HiveDB(dbpath)
            db.connect()
        return db
    except Exception as e:
        logger.error(e)
        raise ClientException("Could not initialize local cache", e)
Example #46
    def parse(self, biiresponse):
        if isinstance(self.cell, VirtualCell):
            biiresponse.error("You're trying to parse a virtual file: %s" %
                              self.cell.name)
            return False

        try:
            if self.content.parser is None:
                self.content.parser = parser_factory(self.cell.type,
                                                     self.cell.name.cell_name)
            if self.content.parser:
                self.content.parse()
                self.cell.hasMain = self.content.parser.has_main_function()
                self.cell.dependencies.update_declarations(
                    self.content.parser.explicit_declarations)
        except Exception as e:
            logger.error(str(e))
            biiresponse.error("Error parsing %s file" % self.cell.name)
Example #47
    def _generate_project(self, toolchain_file, parameters):
        '''runs CMake to generate Makefiles or Project'''
        # Obtain generator
        settings = self.hive_disk_image.settings
        generator = settings.cmake.generator

        # Define toolchain if necessary, for arduino or cross building
        toolchain = '' if not toolchain_file else '-DCMAKE_TOOLCHAIN_FILE=%s' % toolchain_file

        # Define command to run
        parameters = ' '.join(parameters)
        cmake_rel_path = os.path.relpath(self.bii_paths.cmake, self.bii_paths.build)
        command = ('"%s" %s -G "%s" -Wno-dev %s %s'
                   % (cmake_command(self.bii_paths), toolchain, generator, parameters,
                      cmake_rel_path))
        self.user_io.out.write('Running: %s\n' % command)

        if 'NMake' in generator:
            # VS specific: it is necessary to call vcvarsall
            self.user_io.out.warn('NMake generator must run in a shell with compiler defined.\n'
                                  'It might not work if not')
            command = command_with_vcvars(generator, self.bii_paths.build, command)

        retcode, cmake_output = execute(command, self.user_io, cwd=self.bii_paths.build)
        if 'Does not match the generator used previously' in cmake_output:
            try:
                self.user_io.out.warn('Previous generator does not match. Deleting build folder '
                                      'and trying again')
                self.hive_disk_image.delete_build_folder()
            except Exception as e:
                self.user_io.out.warn('Could not complete deletion %s' % str(e))
            self.user_io.out.warn('Running cmake again')
            retcode, cmake_output = execute(command, self.user_io, cwd=self.bii_paths.build)
        if retcode != 0:
            logger.error(cmake_output)
            raise BiiException('CMake failed')

        if 'Eclipse' in self.hive_disk_image.settings.cmake.generator:
            ide = Eclipse(self.bii_paths)
            ide.configure_project()
            self.user_io.out.success('Eclipse project in %s\n'
                                     'Open eclipse, select "File > Import > General > '
                                     'Existing project into Workspace" '
                                     'and select folder\n' % self.bii_paths.project_root)
Example #48
    def execute(self, argv):
        '''Executes given command
        @param argv: array containing command and its parameters
        '''

        #Obtain method, group and class
        try:
            if '--quiet' in argv:
                argv.remove('--quiet')
                self.bii.user_io.out.level = WARN
            elif '--verbose' in argv:
                argv.remove('--verbose')
                self.bii.user_io.out.level = DEBUG

            command = argv[0]
            if command == '--help' or command == '-h':
                self.catalog.print_help(self.bii.user_io.out, argv[1:])
                return
            elif command == '-v' or command == '--version':
                from biicode.common import __version__
                self.bii.user_io.out.write(str(__version__) + '\n')
                return

            method, _, class_ = self._get_method(command)
        except Exception as e:
            tb = traceback.format_exc()
            logger.debug(argv)
            raise ClientException('None or bad command. Type "bii --help" for available commands')

        #Obtain delegate object
        try:
            instance = class_(self.bii)
        except Exception as e:
            tb = traceback.format_exc()
            logger.error(e)
            logger.error(tb)
            raise BiiException('Internal error: %s tool cannot be created ' % class_.__name__)

        #run bii:work if necessary to process local changes, except for the xxx:exe method
        #if '-h' not in argv and '--help' not in argv:
        #    self._migrate_hive(group)
        #Effective call
        self._migrate_hive()
        self._call_method(argv, method, instance)
Example #49
def smart_deserialize(cls, data):
    """KNOWN PROBLEM: this deserializer simply ignores data fields not
    represented in the smart_serial map, without any warning or user info.
    In such way it is robust to changes, but for example an error in Settings
    as writing compilr: instead of compiler: are not detected, simply ignored"""
    t = cls()
    for field, (key, cls1, cls2) in cls.smart_serial.iteritems():
        d = data.get(key)
        if d:
            if cls1:
                d = cls1.deserialize(d)
        else:
            if cls2:
                d = cls2()
        setattr(t, field, d)
    for field in data.keys():
        if field not in cls.smart_serial:
            logger.error("Error in %s: %s" % (cls.__name__, field))
    return t
Example #50
def smart_deserialize(cls, data):
    '''KNOWN PROBLEM: this deserializer simply ignores data fields not
    represented in the smart_serial map, without any warning or user info.
    This way it is robust to changes, but an error in Settings such as
    writing compilr: instead of compiler: is not detected, simply ignored'''
    t = cls()
    for field, (key, cls1, cls2) in cls.smart_serial.iteritems():
        d = data.get(key)
        if d:
            if cls1:
                d = cls1.deserialize(d)
        else:
            if cls2:
                d = cls2()
        setattr(t, field, d)
    for field in data.keys():
        if field not in cls.smart_serial:
            logger.error('Error in %s: %s' % (cls.__name__, field))
    return t
Example #51
    def match(self, block_cell_names, origin_block_cell_name=None, paths=None):
        #Try absolute
        bcn = self._block_cell_name()
        if bcn in block_cell_names:
            return set([bcn])

        #Try relative
        try:
            self.extension_namelist()
            for name in self.extension_name:
                name_ext = os.path.normpath(
                    os.path.join(os.path.dirname(origin_block_cell_name),
                                 name))
                brl = BlockCellName(name_ext)
                if brl in block_cell_names:
                    return set([brl])
        except:
            pass

        # Try APPROXIMATE, only in same block
        if origin_block_cell_name:
            try:
                block_name = origin_block_cell_name.block_name
                normalized_include = self.normalizedName
                result = set()
                for name in block_cell_names:
                    if name.block_name == block_name:  # Approximate only find in same Block
                        if name.endswith(normalized_include):
                            tail = os.path.split(name)[1]
                            if len(normalized_include) >= len(tail):
                                result.add(name)

                if len(result) == 1:
                    return result

                #TODO: Inform user of multiple matches
                logger.info("Matches for name %s are %s" % (self.name, result))
            except Exception as e:
                logger.error("Approximate find failed %s" % str(e))
                pass

        return set()
Example #52
    def match(self, block_cell_names, origin_block_cell_name=None, paths=None):
        #Try absolute
        try:
            brl = BlockCellName(self.name)
            if brl in block_cell_names:
                return set([brl])
        except:
            pass

        #Try relative
        try:
            name = os.path.normpath(os.path.join(os.path.dirname(origin_block_cell_name),
                                                 self.name))
            brl = BlockCellName(name)
            if brl in block_cell_names:
                return set([brl])
        except:
            pass

        # Try APPROXIMATE, only in same block
        if origin_block_cell_name:
            try:
                block_name = origin_block_cell_name.block_name
                result = set()
                for name in block_cell_names:
                    if name.block_name == block_name:  # Approximate only find in same Block
                        if name.endswith(self.name):
                            tail = os.path.split(name)[1]
                            if len(self.name) >= len(tail):
                                result.add(name)

                if len(result) == 1:
                    return result

                #TODO: Inform user of multiple matches
                logger.debug("Matches for name %s are %s" % (self.name, result))
            except Exception as e:
                logger.error("Approximate find failed %s" % str(e))
                pass

        return set()
Example #53
 def _compute_new(self, block_name, decls, policy, existing_block_names, biiresponse):
     try:
         biiresponse.info("Looking for %s..." % block_name)
         # branches = self._store.read_tracks(block_name)
         # branches.get_blocks()
         block_candidates = [block_name + BranchName("%s/master" % block_name.user)]
         block_candidates = policy.filter(block_candidates)
         delta_versions = self._filter_by_policy(block_candidates, policy, biiresponse)
         logger.debug("The heap is %s" % delta_versions)
         result = self._define_hypothesis(delta_versions, decls,
                                          existing_block_names, biiresponse)
         return result
     except ForbiddenException:  # Propagate forbidden to client
         raise
     except NotInStoreException:
         biiresponse.warn("Can't find block candidate for: %s" % (str(block_name)))
         return []
     except Exception:
         biiresponse.error("Fatal error in server while reading %s" % block_name)
         logger.error(traceback.format_exc())
         return []
Example #54
def _match_find(a, b, alo, blo, ahi, bhi, answer, maxrecursion):
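    # Recursively collects (a_index, b_index) pairs of matching lines between
    # a[alo:ahi] and b[blo:bhi], anchoring on lines that are unique in each slice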
    if maxrecursion < 0:
        logger.error('Internal merge error')
        # this will never happen normally, this check is to prevent DOS attacks
        return
    oldlength = len(answer)
    if alo == ahi or blo == bhi:
        return
    last_a_pos = alo - 1
    last_b_pos = blo - 1
    for apos, bpos in _lcs_unique(a[alo:ahi], b[blo:bhi]):
        # recurse between lines which are unique in each file and match
        apos += alo
        bpos += blo
        # Most of the time, you will have a sequence of similar entries
        if last_a_pos + 1 != apos or last_b_pos + 1 != bpos:
            _match_find(a, b, last_a_pos + 1, last_b_pos + 1, apos, bpos, answer, maxrecursion - 1)
        last_a_pos = apos
        last_b_pos = bpos
        answer.append((apos, bpos))
    if len(answer) > oldlength:
        # find matches between the last match and the end
        _match_find(a, b, last_a_pos + 1, last_b_pos + 1, ahi, bhi, answer, maxrecursion - 1)
    elif a[alo] == b[blo]:
        # find matching lines at the very beginning
        while alo < ahi and blo < bhi and a[alo] == b[blo]:
            answer.append((alo, blo))
            alo += 1
            blo += 1
        _match_find(a, b, alo, blo, ahi, bhi, answer, maxrecursion - 1)
    elif a[ahi - 1] == b[bhi - 1]:
        # find matching lines at the very end
        nahi = ahi - 1
        nbhi = bhi - 1
        while nahi > alo and nbhi > blo and a[nahi - 1] == b[nbhi - 1]:
            nahi -= 1
            nbhi -= 1
        _match_find(a, b, last_a_pos + 1, last_b_pos + 1, nahi, nbhi, answer, maxrecursion - 1)
        for i in xrange(ahi - nahi):
            answer.append((nahi + i, nbhi + i))
Example #55
    def create_multi(self, values, collection, enable_update_if_current=False):
        dbcol = self.db[collection]
        txn_k = update_if_current.SERIAL_TXN_COUNTER_KEY

        serials = []
        for v in values:
            if enable_update_if_current:
                update_if_current.enable_check_for(v)
            serial = v.serialize()
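            # Documents that carry the txn counter start it at 0 (used by the update_if_current checks)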
            if hasattr(v, txn_k):
                serial[txn_k] = 0
                setattr(v, txn_k, 0)
            serials.append(serial)

        try:
            id_or_error = dbcol.insert(serials, getLastError=1)
            if isinstance(id_or_error, basestring) \
                and '_id' in serial and id_or_error != serial['_id']:
                raise BiiStoreException(id_or_error)
        except Exception as e:
            logger.error(traceback.format_exc())
            raise e