Пример #1
0
    def __init__(self,config_parser):
        """Build the DB engine/session, the feed/item/file storages,
        the external-process list and the content fetcher from config."""
        # --- database engine -------------------------------------------------
        db_path  = get_from_config_parser(config_parser,'Database','path','database')
        db_debug = get_boolean_from_config_parser(config_parser,'Database','debug',False)
        db_dir = os.path.dirname(db_path)
        if not os.path.exists(db_dir):
            mkdir(db_dir)
        sys.stderr.write('Connecting to database at "%s"\n' % db_path)
        self._engine = create_engine('sqlite:///%s' % db_path,echo=db_debug)

        # --- ORM session -----------------------------------------------------
        session_factory = sessionmaker(bind=self._engine)
        self._session = session_factory()
        # --- feed and item storage -------------------------------------------
        self._feed_storage = FeedStorage(self._engine,self._session)
        self._item_storage = ItemStorage(self._engine,self._session)
        # subprocess.Popen handles owned and reaped by this object
        self._external_processes = []
        # --- file storage (data dump) ----------------------------------------
        dump_path  = get_from_config_parser(config_parser,'FileStorage','path','datadump')
        block_limit = get_int_from_config_parser(config_parser,'FileStorage','max-block-size',
                                                 file_storage.DEFAULT_MAX_BLOCK_SIZE)
        bzip2_bin = get_from_config_parser(config_parser,'FileStorage','bzip2-path','/usr/bin/bzip2')
        self._file_storage = FileStorage(self._external_processes,dump_path,
                                         block_limit,bzip2_bin)
        # --- content fetcher -------------------------------------------------
        self._fetcher = Fetcher(config_parser)
Пример #2
0
    def post(self, request):
        """Register a new user and provision their personal storage.

        On valid data: saves the user, creates the "<email>" and
        "<email>@data" folders in the external file storage, seeds an
        initial map.json (root folder + last_submission timestamp),
        uploads it, and asks the local key service to generate keys.

        Returns 201 with the initial data map, or 400 with serializer
        errors.
        """
        serializer = UserRegisterSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()

            # Create the user's personal folders in the remote file storage.
            email_info = serializer.validated_data['email']
            file_storage = FileStorage()
            file_storage.create_folder(email_info)
            file_storage.create_folder_in_specific_folder(
                email_info + "@" + "data", email_info)

            # Initial file map: a single root folder and no files yet.
            data_map = {
                "folders": {
                    "000000": {
                        "name": "/",
                        "parent": "",
                        "create_date": "17/03/2020"
                    }
                },
                "files": {},
                "last_submission":
                str(int(datetime.timestamp(datetime.now()) * 1000))
            }

            base_dir = ".temp/"
            # exist_ok replaces the race-prone bare try/except around os.mkdir.
            os.makedirs(os.path.join(base_dir, email_info), exist_ok=True)
            file_path = base_dir + email_info + "/map.json"
            # Context manager guarantees the handle is closed even on error.
            with open(file_path, 'w') as map_file:
                json.dump(data_map, map_file)

            file_storage.upload_file_with_path_in_specific_folder(
                file_path, email_info)

            requests.post('http://127.0.0.1:5000/api/gen_key',
                          data={
                              "email": serializer.validated_data['email'],
                              "secret_password": "******"
                          })

            return JsonResponse(
                {
                    'message': 'Register successful!',
                    'data_map': data_map
                },
                status=status.HTTP_201_CREATED)

        return JsonResponse(
            {
                'error_messages': serializer.errors,
                'errors_code': 400,
            },
            status=status.HTTP_400_BAD_REQUEST)
Пример #3
0
    def factory(type, conf):
        """Create a concrete AbstractStorage for the requested backend.

        :raises: RuntimeError
        """
        # Guard clause: reject unknown backends up front.
        if type != 'file':
            raise RuntimeError('Unable to instantiate storage with type %s' % type)
        return FileStorage(conf)
Пример #4
0
class TestFileStorage(unittest.TestCase):
    """Integration tests for FileStorage backed by a Mongo GridFS.

    Requires a reachable MongoDB instance at the test host below.
    """

    def setUp(self):
        self.mongo_connector = MongoConnector(host="192.168.33.10")
        self.file_storage = FileStorage(self.mongo_connector.fs)

    def test_get_records(self):
        """Round-trip: store this test file's bytes, then fetch them back."""
        filename = "file_storage_test.py"
        # Context manager closes the handle; the original leaked it.
        with open(filename, "rb") as file_obj_in:
            _id = self.file_storage.put(file_obj_in.read(), filename)
        file_obj_out = self.file_storage.get(_id)
        # The stored object should be retrievable by the id put() returned.
        self.assertIsNotNone(file_obj_out)

    def test_get_all(self):
        """get_all() should be iterable; entries are printed for inspection."""
        all_files = self.file_storage.get_all()
        for stored_file in all_files:
            print(stored_file)
Пример #5
0
def get_storage_obj(dir_name=""):
    global OBJECT_STORAGE_OBJ
    if OBJECT_STORAGE_OBJ is None:
        if settings.DATA_STORAGE_USE_S3:
            OBJECT_STORAGE_OBJ = ObjectStorage()
        else:
            OBJECT_STORAGE_OBJ = FileStorage(dir_name)
    return OBJECT_STORAGE_OBJ
Пример #6
0
    def __init__(self, file, folder, vol=5):
        """Load the sound-clip database from ``file`` and set up audio output.

        NOTE(review): ``vol`` is currently unused (volume setting disabled).
        """
        print('sounds starts')

        # Directory holding the clips; the JSON db is read via FileStorage.
        self.cwd = folder
        fs = FileStorage()
        fs.readJson(file)
        self.db = fs.db

        print('Found {} sounds clips'.format(len(self.db)))

        self.audio_player = AudioPlayer()
        time.sleep(0.1)  # give the player a moment to initialise
        print('AudioPlayer found:', self.audio_player.audio_player)

        self.r2 = TTAstromech()
 def setUp(self):
     """Build a File fixture and an empty FileStorage before each test."""
     # Mock values to be passed to File class constructor
     file_name = "file_name"
     file_path = "C:/Desktop/my_dir/file_name"
     file_extension = "txt"
     absolute_path = "C:/Desktop/my_dir/file_name.txt"
     # An object of type DetectionIndicators
     detection_indicators = DetectionIndicators()
     """Initialise File class
 args:
   file_name (str) : file name
   file_path (str) : file path
   file_extension (str) : file extension
   absolute_path (str) : absolute path
   detection_indicators(DetectionIndicators): object of type DetectionIndicators      
 """
     # NOTE(review): the triple-quoted strings here are bare expression
     # statements, not docstrings; they have no runtime effect.
     self.file = File(file_name, file_path, file_extension, absolute_path,
                      detection_indicators)
     """Initialise FileStorage class
 args:
   [] (list) : an empty list to store files
 """
     self.file_storage = FileStorage([])
Пример #8
0
    def put(self, request, *args, **kwargs):
        """Replace an existing file in the user's storage folder.

        Writes the uploaded byte array to a temp file, deletes the remote
        file with the same title, re-uploads the new content, then removes
        the temp file. Returns 200 on success, 503 when the remote file or
        local cleanup is missing, 401 for invalid data, 402 on unexpected
        errors.
        """
        base_dir = ".temp/"
        try:
            serializer = FileUploadSerializer(data=request.data)
            if serializer.is_valid():
                file_name = request.data['name']
                file_data = request.data['data']['data']
                file_path = base_dir + file_name
                # Context manager closes the temp file even if the write fails.
                with open(file_path, 'wb') as temp_file:
                    temp_file.write(bytes(bytearray(file_data)))

                file_storage = FileStorage()
                files = file_storage.get_file_list_in_specific_folder(
                    request.user.email)

                # Find the remote file whose title matches the request.
                remove_file = ""
                for f in files:
                    if f['title'] == file_name:
                        remove_file = f
                        break

                if remove_file != "":
                    file_storage.delete_file_by_id(remove_file['id'])
                    file_storage.upload_file_with_path_in_specific_folder(
                        file_path, request.user.email)

                    if os.path.exists(file_path):
                        os.remove(file_path)

                        return JsonResponse({'message': request.data},
                                            status=status.HTTP_200_OK)
                    else:
                        return JsonResponse(
                            {'message': "Service unavaible"},
                            status=status.HTTP_503_SERVICE_UNAVAILABLE)
                else:
                    return JsonResponse(
                        {'message': "Service unavaible"},
                        status=status.HTTP_503_SERVICE_UNAVAILABLE)

            return JsonResponse({'message': "Request's data is wrong"},
                                status=status.HTTP_401_UNAUTHORIZED)
        except Exception:
            # Bug fix: the original built this response but never returned it,
            # so errors fell through to an implicit None (server-side 500).
            return JsonResponse(
                {
                    'error_message': "Somethings Error",
                    'errors_code': 400,
                },
                status=status.HTTP_402_PAYMENT_REQUIRED)
Пример #9
0
    def delete(self, request, *args, **kwargs):
        """Delete a named file from the user's "<email>@data" folder.

        Returns 200 with the request data on success, 503 when the file is
        not found remotely, 401 for invalid data, 402 on unexpected errors.
        """
        try:
            serializer = FileRemoveSerializer(data=request.data)
            if serializer.is_valid():
                file_name = request.data['name']

                file_storage = FileStorage()
                files = file_storage.get_file_list_in_specific_folder(
                    request.user.email + "@" + "data")

                # Find the remote file whose title matches the request.
                remove_file = ""
                for f in files:
                    if f['title'] == file_name:
                        remove_file = f
                        break

                if remove_file != "":
                    file_storage.delete_file_by_id(remove_file['id'])

                    return JsonResponse({'message': request.data},
                                        status=status.HTTP_200_OK)
                else:
                    return JsonResponse(
                        {'message': "Service unavaible"},
                        status=status.HTTP_503_SERVICE_UNAVAILABLE)

            return JsonResponse({'message': "Request's data is wrong"},
                                status=status.HTTP_401_UNAUTHORIZED)
        except Exception:
            # Bug fix: the original built this response but never returned it,
            # so errors fell through to an implicit None (server-side 500).
            return JsonResponse(
                {
                    'error_message': "Somethings Error",
                    'errors_code': 400,
                },
                status=status.HTTP_402_PAYMENT_REQUIRED)
Пример #10
0
    def post(self, request):
        """Upload a file into the user's "<email>@data" folder.

        Writes the posted byte array to a temp file, uploads it to the
        remote storage, removes the temp file, and reports 200 on success,
        503 when local cleanup fails, 400 for invalid data or errors.
        """
        base_dir = ".temp/"
        try:
            serializer = FileUploadSerializer(data=request.data)
            if serializer.is_valid():
                file_name = request.data['name']
                file_data = request.data['data']['data']
                file_path = base_dir + file_name
                # Context manager closes the temp file even if the write fails.
                with open(file_path, 'wb') as temp_file:
                    temp_file.write(bytes(bytearray(file_data)))

                file_storage = FileStorage()
                file_storage.upload_file_with_path_in_specific_folder(
                    file_path, request.user.email + "@" + "data")

                if os.path.exists(file_path):
                    os.remove(file_path)

                    return JsonResponse({'message': request.data},
                                        status=status.HTTP_200_OK)
                else:
                    return JsonResponse(
                        {'message': "Service unavaible"},
                        status=status.HTTP_503_SERVICE_UNAVAILABLE)

            return JsonResponse({'message': "Request's data is wrong"},
                                status=status.HTTP_400_BAD_REQUEST)
        except Exception:
            # Bug fix: the original never returned this response, yielding
            # an implicit None and a server-side 500 on errors.
            return JsonResponse(
                {
                    'error_message': "Somethings Error",
                    'errors_code': 400,
                },
                status=status.HTTP_400_BAD_REQUEST)
Пример #11
0
    def get(self, request, *args, **kwargs):
        """Download the user's map.json and return its raw bytes as JSON.

        Returns 200 with a Buffer-style byte list, or 503 when map.json is
        not found in the user's remote folder.
        """
        base_dir = ".temp/"
        file_name = "map.json"

        file_storage = FileStorage()

        files = file_storage.get_files_by_title_in_specific_folder(
            file_name, request.user.email)
        if len(files) == 0:
            return JsonResponse({'message': "Service unavaible"},
                                status=status.HTTP_503_SERVICE_UNAVAILABLE)

        file_storage.download_file(files[0], base_dir + file_name)

        # Context manager replaces the unclosed open/read/close triple.
        with open(base_dir + file_name, 'rb') as f:
            data = f.read()

        return JsonResponse(
            {'message': {
                'type': 'Buffer',
                'data': list(bytearray(data)),
            }},
            status=status.HTTP_200_OK)
Пример #12
0
    def setup(cls, channel, pool_size):
        """Initialise the pool: read the stored name list over ``channel``."""
        cls.pool_size = pool_size

        cls.file_storage = FileStorage()
        cls.file_storage.setup(channel)

        cls.ret = cls.file_storage.read_file()

        # Record each stored name once, preserving first-seen order.
        for name in cls.ret.splitlines():
            if name not in cls.names:
                cls.names.append(name)

        print("INIT")
        print("READ " + cls.ret)
        print("PARSED " + str(cls.names))
Пример #13
0
    def get(self, request, *args, **kwargs):
        """Fetch one encrypted file by its id, resolved via the user's map.json.

        Looks the id up in map.json, downloads "<id><name>.aes" to a temp
        dir and returns its bytes plus the map entry. Returns 503 when
        map.json or the file is missing remotely, 400 for an unknown id.
        """
        base_dir = ".temp/"
        file_id = kwargs['file_id']
        # Normalise to the zero-padded 6-character id used in map.json
        # (zfill is equivalent to the original manual '0' padding).
        file_id = str(file_id).zfill(6)

        file_storage = FileStorage()

        files = file_storage.get_files_by_title_in_specific_folder(
            "map.json", request.user.email)
        if len(files) == 0:
            return JsonResponse({'message': "Service unavaible"},
                                status=status.HTTP_503_SERVICE_UNAVAILABLE)

        map_json_content = json.loads(file_storage.get_content_file(files[0]))

        if file_id not in map_json_content['files']:
            return JsonResponse({'message': "File is not found"},
                                status=status.HTTP_400_BAD_REQUEST)

        file_name = map_json_content['files'][file_id]['name']
        file_name = file_id + file_name + '.aes'

        files = file_storage.get_files_by_title(file_name)
        if len(files) == 0:
            return JsonResponse({'message': "Service unavaible"},
                                status=status.HTTP_503_SERVICE_UNAVAILABLE)
        file_storage.download_file(files[0], base_dir + file_name)

        # Context manager guarantees the handle is closed.
        with open(base_dir + file_name, 'rb') as f:
            data = f.read()

        return JsonResponse(
            {
                'message': {
                    'type': 'Buffer',
                    'data': list(bytearray(data)),
                    'name': file_name,
                    'map_data': map_json_content['files'][file_id]
                }
            },
            status=status.HTTP_200_OK)
Пример #14
0
    def post(self, request, *args, **kwargs):
        """Compare the client's map.json with the server's copy and decide
        the sync direction.

        Response ``code`` values:
          0 - timestamps equal, nothing to sync
          1 - client is newer: client should upload/update/remove on server
          2 - server is newer: client should download/update/remove locally
        """
        try:
            serializer = FileUploadSerializer(data=request.data)
            if serializer.is_valid():
                file_name = request.data['name']

                file_storage = FileStorage()
                files = file_storage.get_file_list_in_specific_folder(
                    request.user.email)
                map_file_name = "map.json"

                # Locate the server-side map.json for this user.
                map_file = ""
                for f in files:
                    if f['title'] == map_file_name:
                        map_file = f
                        break

                if map_file == "":
                    return JsonResponse(
                        {'message': "Internal server error"},
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)

                map_file = json.loads(file_storage.get_content_file(map_file))

                # The client sends its map.json as a raw byte array.
                file_data = request.data['data']['data']
                file_data = bytes(bytearray(file_data))
                file_data = json.loads(file_data)

                if int(map_file['last_submission']) == int(
                        file_data['last_submission']):
                    return JsonResponse(
                        {
                            'message': "There is nothing to sync",
                            'code': 0
                        },
                        status=status.HTTP_200_OK)

                files_1 = map_file['files']
                files_2 = file_data['files']

                # Set arithmetic on file ids: only-client, only-server, and
                # present on both sides but with differing checksums.
                file_add = list(set(files_2.keys()) - set(files_1.keys()))
                file_modified = []
                file_delete = list(set(files_1.keys()) - set(files_2.keys()))

                intersec_files = list(
                    set(files_2.keys()) & set(files_1.keys()))

                for file_id in intersec_files:
                    if files_2[file_id]['check_sum'] != files_1[file_id][
                            'check_sum']:
                        file_modified.append(file_id)

                if int(map_file['last_submission']) < int(
                        file_data['last_submission']):
                    # Client copy is newer: sync to server.
                    return JsonResponse(
                        {
                            'message': "Sync data to server",
                            'data': {
                                'upload': file_add,
                                'update': file_modified,
                                'remove': file_delete
                            },
                            'code': 1
                        },
                        status=status.HTTP_200_OK)

                elif int(map_file['last_submission']) > int(
                        file_data['last_submission']):
                    # Server copy is newer: sync to client.
                    return JsonResponse(
                        {
                            'message': "Sync data to client",
                            'data': {
                                'upload': file_delete,
                                'update': file_modified,
                                'remove': file_add
                            },
                            'code': 2
                        },
                        status=status.HTTP_200_OK)

            return JsonResponse({'message': "Request's data is wrong"},
                                status=status.HTTP_200_OK)
        except Exception:
            # Bug fix: the original never returned this response, so errors
            # produced an implicit None and a server-side 500.
            return JsonResponse(
                {
                    'error_message': "Somethings Error",
                    'errors_code': 400,
                },
                status=status.HTTP_400_BAD_REQUEST)
Пример #15
0
 def setUp(self):
     """Connect to the test MongoDB and build the FileStorage under test."""
     # NOTE(review): requires a reachable MongoDB at this (vagrant-style) host.
     self.mongo_connector = MongoConnector(host="192.168.33.10")
     self.file_storage = FileStorage(self.mongo_connector.fs)
Пример #16
0
    def __init__(self, file_name):
        """Initialise the FileStorage base class and this subclass's state."""
        # Explicit base-class call (not super()); kept as written.
        FileStorage.__init__(self, file_name)

        # presumably a registry of types needing no special handling — TODO confirm
        self.simple_types = []
Пример #17
0
 def save_storage(self, data):
     """Delegate persistence of ``data`` to the FileStorage base class."""
     FileStorage.save_storage(self, data)
Пример #18
0
 def load_storage(self):
     """Load and return stored data via the FileStorage base implementation."""
     return FileStorage.load_storage(self)
Пример #19
0
import os
import redis
from file_storage import FileStorage
from s3_storage import S3Storage
from elasticsearch import Elasticsearch
from elasticsearch_dsl import connections
from pathlib import Path
import yaml

app = Flask(__name__)

# Load configuration; APP_SETTINGS selects the config class (dev by default).
app_settings = os.getenv('APP_SETTINGS', 'server.config.DevelopmentConfig')
app.config.from_object(app_settings)

# Pick the upload backend from configuration.
if app.config.get('UPLOAD_TYPE') == 'file':
    storage = FileStorage(app.config.get('UPLOAD_FOLDER'))
elif app.config.get('UPLOAD_TYPE') == 'aws':
    storage = S3Storage(app.config.get('AWS_BUCKET'))
else:
    # Bug fix: fail fast with a clear message instead of a NameError on
    # `storage` below when UPLOAD_TYPE is missing or unrecognised.
    raise RuntimeError(
        'Unsupported UPLOAD_TYPE: %r' % app.config.get('UPLOAD_TYPE'))

tm = tus_manager(app,
                 upload_url='/uploads',
                 upload_folder=app.config.get('UPLOAD_FOLDER'),
                 overwrite=True,
                 upload_finish_cb=None,
                 storage=storage)

db = SQLAlchemy(app)
redis_db = redis.StrictRedis(host=app.config.get('REDIS_SERVERNAME'),
                             port=6379,
                             db=0,
                             password='******')
Пример #20
0
class FileStorageTest(unittest.TestCase):
    """Unit tests for FileStorage persistence: exist(), load(), save()."""

    def setUp(self):
        self.subject = FileStorage('test_storage.json')

    def tearDown(self):
        # Remove the fixture file if a test created it.
        if os.path.exists(self.subject.file_name):
            os.remove(self.subject.file_name)

    def test_exist_if_not_exist(self):
        # No fixture file has been created, so exist() must be False.
        # (The original called exist() twice; one call is sufficient.)
        self.assertEqual(self.subject.exist(), False)

    def test_exist_if_exist(self):
        # Create an empty storage file; the context manager closes the
        # handle the original open(...).write("") leaked.
        with open(self.subject.file_name, "w") as fixture:
            fixture.write("")

        self.assertEqual(self.subject.exist(), True)
        # No explicit os.remove here: tearDown cleans the fixture up.

    def test_load(self):
        data = [MediaInfo(), MediaInfo()]
        with open(self.subject.file_name, "w") as fixture:
            fixture.write(json.dumps(data))

        self.subject.load()

        self.assertEqual(len(self.subject.items()), 2)

    def test_save(self):
        self.subject.add(MediaInfo())
        self.subject.add(MediaInfo())

        self.subject.save()

        self.assertEqual(len(self.subject.items()), 2)
Пример #21
0
 def setUp(self):
     """Create the FileStorage fixture backed by test_storage.json."""
     self.subject = FileStorage('test_storage.json')
Пример #22
0
class Coffer:
    """Application hub: owns the SQLite engine and ORM session, the feed,
    item and file storages, the content fetcher, and the list of external
    sub-processes (bzip2) used to compress the data dump.

    NOTE(review): the .encode('utf-8') calls in current_items_feed write
    bytes to sys.stderr — Python 2-era style; under Python 3 these would
    raise TypeError on a text stream. Confirm the target interpreter.
    """
    def __init__(self,config_parser):
        # Connect to engine
        database_path  = get_from_config_parser(config_parser,'Database','path','database')
        database_debug = get_boolean_from_config_parser(config_parser,'Database','debug',False)
        # NOTE(review): `dir` shadows the builtin of the same name.
        dir = os.path.dirname(database_path)
        if not os.path.exists(dir):
            mkdir(dir)
        sys.stderr.write('Connecting to database at "%s"\n' % database_path)
        self._engine = create_engine('sqlite:///%s' % database_path,echo=database_debug)

        # Start session
        Session = sessionmaker(bind=self._engine)
        self._session = Session()
        # Initialize feed storage
        self._feed_storage = FeedStorage(self._engine,self._session)
        # Initialize item storage
        self._item_storage = ItemStorage(self._engine,self._session)
        # A list of subprocess.Popen processes that will be maintained
        # by the Coffer object.
        self._external_processes = []
        # File storage (data dump)
        file_storage_path = get_from_config_parser(config_parser,'FileStorage','path','datadump')
        max_block_size    = get_int_from_config_parser(config_parser,'FileStorage','max-block-size',
                                                       file_storage.DEFAULT_MAX_BLOCK_SIZE)
        bzip2_path = get_from_config_parser(config_parser,'FileStorage','bzip2-path','/usr/bin/bzip2')
        self._file_storage = FileStorage(self._external_processes,file_storage_path,
                                         max_block_size,bzip2_path)
        # Content fetcher configuration
        self._fetcher = Fetcher(config_parser)

    def clone_db_session(self):
        '''
        Returns a brand-new ORM session bound to the same engine, for
        callers needing a session independent of self._session.
        '''
        clone_session = sessionmaker(bind=self._engine)
        return clone_session()

    def finish(self):
        '''
        Waits for all external processes started by coffer to finish.
        '''
        sys.stderr.write('Waiting for sub-processes to finish..\n')
        for process in self._external_processes:
            process.wait()
        sys.stderr.write('  ..finished.\n\n')

    def check_processes(self):
        '''
        Checks if some of the external processes have finished and
        removes them from the external-process list if they have.
        '''
        # Manual index walk: entries are deleted in place while scanning,
        # so the end index shrinks as finished processes are removed.
        end_i = len(self._external_processes)
        i     = 0
        while i < end_i:
            if self._external_processes[i].poll() is not None:
                del self._external_processes[i]
                end_i -= 1
            else:
                i += 1

    def run_command_shell(self):
        '''
        Runs the interactive command shell until the user exits.
        '''
        shell = CommandShell(self)
        shell.cmdloop()

    def get_feed_info(self,url):
        '''
        Obtain information on an RSS feed, given its URL. The
        information will be obtained directly from the URL,
        not from our database. This works for feeds regardless
        of whether they are stored in our database.
        '''
        # Returns only the feed title, or None when the feed has no title.
        feed_results = feedparser.parse(url)
        sys.stderr.write(str(feed_results))
        if 'title' in feed_results.feed:
            return feed_results.feed.title
        else:
            return None

    def current_items_feed(self,
                           session,
                           feed,
                           enable_ad_filter = False,
                           check_existence  = False,
                           debug_enabled    = False):
        '''
        Returns a generator for the list of current items, i.e. the
        current list of fresh items returned by all known feeds.
        @param enable_ad_filter: if True, advertisements will be filtered out
                       using the predefined regex
        @param check_existence: if True, only entries that are not already
                       stored in the items database will be returned.
        '''
        # exclude_pattern is only bound when the ad filter is active; the
        # final yield condition guards every use of it.
        if enable_ad_filter and len(feed.ad_filters) > 0:
            exclude_pattern = re.compile(u'|'.join(feed.ad_filters))
        feed_results = feedparser.parse(feed.get_url())
        for entry in feed_results.entries:
            if 'link' not in entry.keys():
                sys.stderr.write((u'No link found in this item: "%s"\n' \
                                  % entry.title).encode('utf-8'))
                if debug_enabled:
                    sys.stderr.write('Keys:\n%s\n' % str(entry.keys()))
                continue
            if 'id' not in entry.keys():
                if debug_enabled:
                    sys.stderr.write((u'No entry id found in this item: "%s"\n' \
                                      % entry.title).encode('utf-8'))
                # Fall back to the entry link as a unique identifier.
                entry_id = entry.link
                if debug_enabled:
                    sys.stderr.write('Keys:\n%s\n' % str(entry.keys()))
                    sys.stderr.write((u'Using link [%s] instead of id.\n' \
                                      % entry_id).encode('utf-8'))
            else:
                entry_id = entry.id
            if check_existence:
                if self._item_storage.exists_in_session(session,entry_id):
                    continue
            if (not enable_ad_filter) or (len(feed.ad_filters) == 0) \
                   or (not exclude_pattern.search(entry.title)):
                yield (feed.get_id(),entry_id,entry)

    def current_items_in_session(self,
                                 session,
                                 enable_ad_filter = False,
                                 check_existence  = False,
                                 debug_enabled    = False):
        '''
        Returns a generator for the list of current items, i.e. the
        current list of fresh items returned by all known feeds.
        @param enable_ad_filter: if True, advertisements will be filtered out
                       using the predefined regex
        @param check_existence: if True, only entries that are not already
                       stored in the items database will be returned.
        '''
        # Same as current_items, but with an explicitly supplied session.
        for feed in self._feed_storage.feeds():
            for item in self.current_items_feed(session,feed,enable_ad_filter,check_existence,debug_enabled):
                yield item

    def current_items(self,
                      enable_ad_filter = False,
                      check_existence  = False,
                      debug_enabled    = False):
        '''
        Returns a generator for the list of current items, i.e. the
        current list of fresh items returned by all known feeds.
        @param enable_ad_filter: if True, advertisements will be filtered out
                       using the predefined regex
        @param check_existence: if True, only entries that are not already
                       stored in the items database will be returned.
        '''
        # Convenience wrapper over current_items_feed using self._session.
        for feed in self._feed_storage.feeds():
            for item in self.current_items_feed(self._session,feed,enable_ad_filter,check_existence,debug_enabled):
                yield item

    def fetch_and_store(self,targets):
        '''
        Download target URLs and store them in the file storage.
        @param targets: A list of (feed-id,URL) pairs.
        '''
        text_objs_dict = self._fetcher.fetch(targets)
        self._file_storage.store_all(text_objs_dict)
Пример #23
0
    def __init__(self, file_name):
        """Initialise the FileStorage base class and this subclass's state."""
        # Explicit base-class call (not super()); kept as written.
        FileStorage.__init__(self, file_name)

        # presumably a registry of types needing no special handling — TODO confirm
        self.simple_types = []
Пример #24
0
 def load_storage(self):
     """Load and return stored data via the FileStorage base implementation."""
     return FileStorage.load_storage(self)
class TestFileStorage(unittest.TestCase):
    """Unit tests for FileStorage list management: constructor, add, get,
    delete and similar-file lookup."""

    # The setup before each test method
    def setUp(self):
        # Mock values to be passed to File class constructor
        file_name = "file_name"
        file_path = "C:/Desktop/my_dir/file_name"
        file_extension = "txt"
        absolute_path = "C:/Desktop/my_dir/file_name.txt"
        # An object of type DetectionIndicators
        detection_indicators = DetectionIndicators()
        """Initialise File class
    args:
      file_name (str) : file name
      file_path (str) : file path
      file_extension (str) : file extension
      absolute_path (str) : absolute path
      detection_indicators(DetectionIndicators): object of type DetectionIndicators      
    """
        # NOTE(review): the triple-quoted strings in this method are bare
        # expression statements, not docstrings; they have no runtime effect.
        self.file = File(file_name, file_path, file_extension, absolute_path,
                         detection_indicators)
        """Initialise FileStorage class
    args:
      [] (list) : an empty list to store files
    """
        self.file_storage = FileStorage([])

    # This method test FileStorage constructor
    def test_constructor(self):
        # Get actual values of the file storage class fields
        # that are passed to constructor
        expected = self.file_storage.list_of_files
        actual = []
        # Compare actual against expected, after class initialisation
        self.assertEqual(expected, actual)

    # This method tests FileStorage.add_new_file function
    def test_add_new_file(self):
        expected = 1
        # Add new entry to the list
        self.file_storage.add_new_file(self.file)
        # Compare expected number of entries against actual
        actual = len(self.file_storage.list_of_files)
        self.assertEqual(expected, actual)

    # This method tests FileStorage.get_all_files function
    def test_get_all_files(self):
        expected_nr_of_file = 1
        # Add new entry to the file storage list
        self.file_storage.add_new_file(self.file)
        # Get actual number of entries
        actual_nr_of_files = len(self.file_storage.get_all_files())
        # Get actual content of the list
        actual_content = self.file_storage.get_all_files()
        # Expected content of the list
        expected_conter = [self.file]
        # Compare expected against actual
        self.assertEqual(expected_nr_of_file, actual_nr_of_files)
        self.assertEqual(actual_content, expected_conter)

    # This method tests FileStorage.get_file_from_storage function
    def test_get_file_from_storage(self):
        # Add new entry to the file storage list
        self.file_storage.add_new_file(self.file)
        # Absolute path of the newly added file
        src = self.file.absolute_path
        # Get return value of the function that is tested while
        # passing computed source.
        actual_file = self.file_storage.get_file_from_storage(src)
        # Expected return value of the function
        expected_file = self.file
        # Compare expected against actual
        self.assertEqual(expected_file, actual_file)
        # A miss returns False (not None) per the storage's contract.
        non_existing_file = self.file_storage.get_file_from_storage("test")
        self.assertEqual(False, non_existing_file)

    # This method tests FileStorage.delete_file_from_storage function
    def test_delete_file_from_storage(self):
        # Add new entry to the file storage list
        self.file_storage.add_new_file(self.file)
        # Delete new entry from the file storage list
        self.file_storage.delete_file_from_storage(self.file)
        # Evaluate if entry is still in file storage list and store
        # evaluation as boolean
        deleted = self.file in self.file_storage.list_of_files
        # Compare expected against actual
        self.assertEqual(False, deleted)  #

    def test_find_similar_file(self):
        # Add new entry to the file storage list
        self.file_storage.add_new_file(self.file)
        expected_return_value_of_function = False
        actual_return_value_of_function = self.file_storage.find_similar_file(
            self.file.absolute_path)
        # Compare expected versus actual. Expected value would be False as no
        # similar file in the storage
        self.assertEqual(expected_return_value_of_function,
                         actual_return_value_of_function)

        # Make a copy of existing file within the storage
        mock_file = copy.deepcopy(self.file)
        # Alter the way that mock ransomware would
        # NOTE(review): the instance is passed explicitly as the first
        # argument; verify these helpers are not plain instance methods
        # (which would then receive the instance twice).
        mock_file.change_file_extension(mock_file, ".aes")
        mock_file.change_absolute_path(mock_file,
                                       "C:/Desktop/my_dir/file_name.aes")
        expected_return_value_of_function = self.file
        actual_return_value_of_function = self.file_storage.find_similar_file(
            mock_file.absolute_path)
        # Compare expected versus actual. Expected value would be the original file
        # because all parameters except extension are the same.
        self.assertEqual(expected_return_value_of_function,
                         actual_return_value_of_function)
Пример #26
0
import logging
import os
import tempfile

from file_storage import FileStorage
from thegoogs import TheGoogs

logger = logging.getLogger("gmusic")
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())

STORAGE_PATH = "/Users/jd/ws/gmusic/data"

thegoogs = TheGoogs()

storage = FileStorage(STORAGE_PATH)
libdata = storage.read_libdata()

for index, song in enumerate(libdata.uploaded_songs):

    if not storage.song_exists(song):
        logger.info("Downloading song {}/{}: {}".format(
            index, len(libdata.uploaded_songs), song["title"]))
        data = thegoogs.get_uploaded_song(song["id"])
        storage.write_song(data, song)
    else:
        logger.info("Skipping downloaded song: {}".format(song["title"]))

for index, song in enumerate(libdata.all_songs):
    if not storage.song_exists(song):
        logger.info("Downloading unowned song {}/{}: {}".format(
Пример #27
0
            info = r.json()
            print('+', end='')
            # pprint(data)
            # print("Found:", data["hostname"], data["network"]["IPv4"]["address"])
            key = info["network"]["IPv4"]["mac"]
            data = {
                'timestamp': getTimeStamp(),
                'changed': False,
                'json': info
            }
            if key in rpis:
                if rpis[key]["json"]["hostname"] != data["json"]["hostname"]:
                    data["changed"] = True
            rpis[key] = data
    except KeyboardInterrupt:
        break
    except requests.ConnectTimeout:
        # print("Invalid:", url)
        print('.', end='')
    except requests.ConnectionError as e:
        # print("Strange:", url, e)
        print('/', end='')
    # time.sleep(0.1)
    sys.stdout.flush()

print("\nFinished scan")

if len(rpis) > 0:
    fs = FileStorage()
    fs.writeJson('network.json', rpis)
Пример #28
0
 def save_storage(self, data):
     """Persist ``data`` using the FileStorage base-class implementation."""
     FileStorage.save_storage(self, data)