Example #1
 def assign_report_map(self, filename):
     # Replace any existing report map with the file at ``filename``.
     try:
         self.report_map.delete()
     except Exception:
         # The previous file may not exist; a failed delete is not fatal.
         pass
     self.report_map = File(open(filename, 'rb'))
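Note that File(open(filename, 'rb')) leaves the handle open until the model is saved. A variant that copies the content immediately and closes the handle deterministically might look like this (a sketch, assuming a Django FileField and that ``os`` is imported):

 def assign_report_map(self, filename):
     try:
         self.report_map.delete()
     except Exception:
         pass
     with open(filename, 'rb') as fh:
         # FileField.save() copies the content into storage right away,
         # so the handle can close when the block exits.
         self.report_map.save(os.path.basename(filename), File(fh), save=False)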
Example #2
def generate_export(export_type, xform, export_id=None, options=None):
    """
    Create appropriate export object given the export type.

    param: export_type
    param: xform
    params: export_id: ID of export object associated with the request
    param: options: additional parameters required for the lookup.
        binary_select_multiples: boolean flag
        end: end offset
        ext: export extension type
        dataview_pk: dataview pk
        group_delimiter: "/" or "."
        query: filter_query for custom queries
        remove_group_name: boolean flag
        split_select_multiples: boolean flag
        index_tag: ('[', ']') or ('_', '_')
        show_choice_labels: boolean flag
        language: language labels as in the XLSForm/XForm
    """
    username = xform.user.username
    id_string = xform.id_string
    # ``options`` may be passed in as None; normalise before the lookups below.
    options = options if options is not None else {}
    end = options.get("end")
    extension = options.get("extension", export_type)
    filter_query = options.get("query")
    remove_group_name = options.get("remove_group_name", False)
    start = options.get("start")

    export_type_func_map = {
        Export.XLS_EXPORT: 'to_xls_export',
        Export.CSV_EXPORT: 'to_flat_csv_export',
        Export.CSV_ZIP_EXPORT: 'to_zipped_csv',
        Export.SAV_ZIP_EXPORT: 'to_zipped_sav',
        Export.GOOGLE_SHEETS_EXPORT: 'to_google_sheets',
    }

    dataview = None
    if options.get("dataview_pk"):
        dataview = DataView.objects.get(pk=options.get("dataview_pk"))
        records = dataview.query_data(dataview,
                                      all_data=True,
                                      filter_query=filter_query)
        total_records = dataview.query_data(dataview,
                                            count=True)[0].get('count')
    else:
        records = query_data(xform, query=filter_query, start=start, end=end)

        if filter_query:
            total_records = query_data(xform,
                                       query=filter_query,
                                       start=start,
                                       end=end,
                                       count=True)[0].get('count')
        else:
            total_records = xform.num_of_submissions

    if isinstance(records, QuerySet):
        records = records.iterator()

    export_builder = ExportBuilder()
    export_builder.TRUNCATE_GROUP_TITLE = True \
        if export_type == Export.SAV_ZIP_EXPORT else remove_group_name
    export_builder.GROUP_DELIMITER = options.get("group_delimiter",
                                                 DEFAULT_GROUP_DELIMITER)
    export_builder.SPLIT_SELECT_MULTIPLES = options.get(
        "split_select_multiples", True)
    export_builder.BINARY_SELECT_MULTIPLES = options.get(
        "binary_select_multiples", False)
    export_builder.INCLUDE_LABELS = options.get('include_labels', False)
    include_reviews = options.get('include_reviews', False)
    export_builder.INCLUDE_LABELS_ONLY = options.get('include_labels_only',
                                                     False)
    export_builder.INCLUDE_HXL = options.get('include_hxl', False)

    export_builder.INCLUDE_IMAGES \
        = options.get("include_images", settings.EXPORT_WITH_IMAGE_DEFAULT)

    export_builder.VALUE_SELECT_MULTIPLES = options.get(
        'value_select_multiples', False)

    export_builder.REPEAT_INDEX_TAGS = options.get("repeat_index_tags",
                                                   DEFAULT_INDEX_TAGS)

    export_builder.SHOW_CHOICE_LABELS = options.get('show_choice_labels',
                                                    False)

    export_builder.language = options.get('language')

    # 'win_excel_utf8' is only relevant for CSV exports
    if 'win_excel_utf8' in options and export_type != Export.CSV_EXPORT:
        del options['win_excel_utf8']
    export_builder.INCLUDE_REVIEWS = include_reviews
    export_builder.set_survey(xform.survey,
                              xform,
                              include_reviews=include_reviews)

    temp_file = NamedTemporaryFile(suffix=("." + extension))

    columns_with_hxl = export_builder.INCLUDE_HXL and get_columns_with_hxl(
        xform.survey_elements)

    # get the export function by export type
    func = getattr(export_builder, export_type_func_map[export_type])
    try:
        func(temp_file.name,
             records,
             username,
             id_string,
             filter_query,
             start=start,
             end=end,
             dataview=dataview,
             xform=xform,
             options=options,
             columns_with_hxl=columns_with_hxl,
             total_records=total_records)
    except NoRecordsFoundError:
        pass
    except SPSSIOError as e:
        export = get_or_create_export(export_id, xform, export_type, options)
        export.error_message = str(e)
        export.internal_status = Export.FAILED
        export.save()
        report_exception("SAV Export Failure", e, sys.exc_info())
        return export

    # generate filename
    basename = "%s_%s" % (id_string,
                          datetime.now().strftime("%Y_%m_%d_%H_%M_%S_%f"))

    if remove_group_name:
        # add 'remove group name' flag to filename
        basename = "{}-{}".format(basename, GROUPNAME_REMOVED_FLAG)
    if dataview:
        basename = "{}-{}".format(basename, DATAVIEW_EXPORT)

    filename = basename + "." + extension

    # check filename is unique
    while not Export.is_filename_unique(xform, filename):
        filename = increment_index_in_filename(filename)

    file_path = os.path.join(username, 'exports', id_string, export_type,
                             filename)

    # seek to the beginning as required by storage classes
    temp_file.seek(0)
    export_filename = default_storage.save(file_path,
                                           File(temp_file, file_path))
    temp_file.close()

    dir_name, basename = os.path.split(export_filename)

    # get or create export object
    export = get_or_create_export(export_id, xform, export_type, options)

    export.filedir = dir_name
    export.filename = basename
    export.internal_status = Export.SUCCESSFUL
    # For Google Sheets exports, record the URL of the exported sheet.
    if export_type == Export.GOOGLE_SHEETS_EXPORT:
        export.export_url = export_builder.url

    # Do not persist partial exports (those restricted by a start/end filter).
    if start is None and end is None:
        export.save()
    return export
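For context, a minimal invocation might look like the sketch below; the option keys mirror the docstring above, and ``xform`` is assumed to be an existing XForm instance:

options = {
    "extension": "csv",
    "group_delimiter": "/",
    "remove_group_name": True,
    "split_select_multiples": True,
}
export = generate_export(Export.CSV_EXPORT, xform, options=options)
print(export.filename, export.internal_status)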
Example #3
 def get_image_file(name='test.png', ext='png', size=(50, 50), color=(255, 0, 0)):
     # RGBA channel values range from 0 to 255.
     file_obj = BytesIO()
     image = Image.new("RGBA", size=size, color=color)
     image.save(file_obj, ext)
     file_obj.seek(0)
     return File(file_obj, name=name)
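A helper like this typically feeds an upload test; a hypothetical use (the endpoint and field name here are assumptions) could be:

 def test_avatar_upload(self):
     avatar = get_image_file(name='avatar.png')
     response = self.client.post('/profile/', {'avatar': avatar})
     self.assertEqual(response.status_code, 200)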
Example #4
 def post_process(self, paths, dry_run=False, verbosity=1, **options):
     # If this is a dry run, give up now!
     if dry_run:
         return
     # Compile in a temporary environment.
     with TemporaryCompileEnvironment(verbosity=verbosity) as env:
         exclude_names = list(require_settings.REQUIRE_EXCLUDE)
         compile_info = {}
         # Copy all assets into the compile dir.
         for name, storage_details in paths.items():
             storage, path = storage_details
             dst_path = os.path.join(env.compile_dir, name)
             dst_dir = os.path.dirname(dst_path)
             if not os.path.exists(dst_dir):
                 os.makedirs(dst_dir)
             # Copy the asset and compute its MD5 checksum as we stream it.
             file_hash = hashlib.md5()
             with closing(storage.open(path, "rb")) as src_handle:
                 with open(dst_path, "wb") as dst_handle:
                     for block in self._file_iter(src_handle):
                         file_hash.update(block)
                         dst_handle.write(block)
             # Store details of file.
             compile_info[name] = file_hash.digest()
         # Run the optimizer.
         if require_settings.REQUIRE_BUILD_PROFILE is not False:
             if require_settings.REQUIRE_BUILD_PROFILE is not None:
                 app_build_js_path = env.compile_dir_path(
                     require_settings.REQUIRE_BUILD_PROFILE)
             else:
                 app_build_js_path = env.resource_path("app.build.js")
             env.run_optimizer(
                 app_build_js_path,
                 #dir = env.build_dir,
                 #appDir = env.compile_dir,
                 baseUrl=require_settings.REQUIRE_BASE_URL,
             )
         # Compile standalone modules.
         if require_settings.REQUIRE_STANDALONE_MODULES:
             shutil.copyfile(
                 env.resource_path("almond.js"),
                 env.compile_dir_path("almond.js"),
             )
             exclude_names.append(resolve_require_url("almond.js"))
         for standalone_module, standalone_config in require_settings.REQUIRE_STANDALONE_MODULES.items():
             if "out" in standalone_config:
                 if "build_profile" in standalone_config:
                     module_build_js_path = env.compile_dir_path(
                         standalone_config["build_profile"])
                 else:
                     module_build_js_path = env.resource_path(
                         "module.build.js")
                 env.run_optimizer(
                     module_build_js_path,
                     name="almond",
                     include=standalone_module,
                     out=env.build_dir_path(standalone_config["out"]),
                     baseUrl=os.path.join(
                         env.compile_dir,
                         require_settings.REQUIRE_BASE_URL),
                 )
             else:
                 raise ImproperlyConfigured(
                     "No 'out' option specified for module '{module}' in REQUIRE_STANDALONE_MODULES setting."
                     .format(module=standalone_module))
         # Update assets with modified ones.
         compiled_storage = FileSystemStorage(env.build_dir)
         # Walk the compiled directory, checking for modified assets.
         for build_dirpath, _, build_filenames in os.walk(env.build_dir):
             for build_filename in build_filenames:
                 # Determine asset name.
                 build_filepath = os.path.join(build_dirpath,
                                               build_filename)
                 build_name = build_filepath[len(env.build_dir) + 1:]
                 build_storage_name = build_name.replace(os.sep, "/")
                 # Ignore certain files.
                 if build_storage_name in exclude_names:
                     # Delete from storage, if originally present.
                     if build_name in compile_info:
                         del paths[build_storage_name]
                         self.delete(build_storage_name)
                     continue
                 # Update the asset.
                 with File(open(build_filepath, "rb"),
                           build_storage_name) as build_handle:
                     # Calculate the asset's hash.
                     file_hash = hashlib.md5()
                     for block in self._file_iter(build_handle):
                         file_hash.update(block)
                     build_handle.seek(0)
                     # Check if the asset has been modified.
                     if build_name in compile_info:
                         # Compare with the hash recorded before the build.
                         if file_hash.digest() == compile_info[build_name]:
                             continue
                     # The build script modified this asset, so re-save it.
                     paths[build_storage_name] = (compiled_storage,
                                                  build_name)
                     self.delete(build_storage_name)
                     self.save(build_storage_name, build_handle)
                     # Report on the modified asset.
                     yield build_name, build_name, True
         # Report on modified assets.
         super_class = super(OptimizedFilesMixin, self)
         if hasattr(super_class, "post_process"):
             for path in super_class.post_process(paths, dry_run,
                                                  **options):
                 yield path
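For orientation, this mixin is meant to be composed with a concrete staticfiles storage and wired up in settings; a minimal sketch (the class and module names are assumptions) follows:

from django.contrib.staticfiles.storage import StaticFilesStorage

class OptimizedStaticFilesStorage(OptimizedFilesMixin, StaticFilesStorage):
    # collectstatic calls post_process() on this storage after copying files.
    pass

# settings.py (assumed):
# STATICFILES_STORAGE = "myproject.storage.OptimizedStaticFilesStorage"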
Example #5
def generate_export(export_type,
                    extension,
                    username,
                    id_string,
                    export_id=None,
                    filter_query=None,
                    group_delimiter='/',
                    split_select_multiples=True,
                    binary_select_multiples=False):
    """
    Create appropriate export object given the export type
    """

    export_type_func_map = {
        Export.XLS_EXPORT: 'to_xls_export',
        Export.CSV_EXPORT: 'to_flat_csv_export',
        Export.CSV_ZIP_EXPORT: 'to_zipped_csv',
        Export.SAV_ZIP_EXPORT: 'to_zipped_sav',
        Export.ANALYSER_EXPORT: 'to_analyser_export'
    }

    xform = XForm.objects.get(user__username__iexact=username,
                              id_string__exact=id_string)

    # query mongo for the cursor
    records = query_mongo(username, id_string, filter_query)

    export_builder = ExportBuilder()
    export_builder.GROUP_DELIMITER = group_delimiter
    export_builder.SPLIT_SELECT_MULTIPLES = split_select_multiples
    export_builder.BINARY_SELECT_MULTIPLES = binary_select_multiples
    export_builder.set_survey(xform.data_dictionary().survey)

    prefix = slugify('{}_export__{}__{}'.format(export_type, username,
                                                id_string))
    temp_file = NamedTemporaryFile(prefix=prefix, suffix=("." + extension))

    # get the export function by export type
    func = getattr(export_builder, export_type_func_map[export_type])

    func(temp_file.name, records, username, id_string, filter_query)

    # generate filename
    basename = "%s_%s" % (id_string,
                          datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
    if export_type == Export.ANALYSER_EXPORT:
        # Analyser exports should be distinguished by more than just their file extension.
        basename = '{}_ANALYSER_{}'.format(
            id_string,
            datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
    filename = basename + "." + extension

    # check filename is unique
    while not Export.is_filename_unique(xform, filename):
        filename = increment_index_in_filename(filename)

    file_path = os.path.join(username, 'exports', id_string, export_type,
                             filename)

    # TODO: if s3 storage, make private - how will we protect local storage??
    storage = get_storage_class()()
    # seek to the beginning as required by storage classes
    temp_file.seek(0)
    export_filename = storage.save(file_path, File(temp_file, file_path))
    temp_file.close()

    dir_name, basename = os.path.split(export_filename)

    # get or create export object
    if export_id:
        export = Export.objects.get(id=export_id)
    else:
        export = Export(xform=xform, export_type=export_type)
    export.filedir = dir_name
    export.filename = basename
    export.internal_status = Export.SUCCESSFUL
    # don't persist exports that have a filter
    if filter_query is None:
        export.save()
    return export
Example #6
 def _open(self, name, mode='r'):
     # Handle 'rb' as 'r'.
     mode = mode[:1]
     fp = cloudstorage.open(self._add_bucket(name), mode=mode)
     return File(fp)
Example #7
 def test_save(self, mock_sftp):
     self.storage._save('foo', File(BytesIO(b'foo'), 'foo'))
     self.assertTrue(mock_sftp.open.return_value.write.called)
Example #8
    def social_data_process(self):
        if not self.user.is_active:
            return None

        try:
            facebook_user = FacebookUser.objects.get(user=self.user,
                                                     status=True)

            # FB Profile Image
            fb_image_url = 'http://graph.facebook.com/v2.0/' + str(
                facebook_user.uid) + '/picture?type=large'
            fb_image = urllib.urlretrieve(fb_image_url)
            if fb_image[0]:
                fb_image_contents = File(open(fb_image[0], 'rb'))
                if self.image:
                    self.remove_profile_images()
                self.image.save('profile-image.png',
                                fb_image_contents,
                                save=True)

            # FB Followers Count
            fql_query = 'SELECT friend_count FROM user WHERE uid=me()'
            r = requests.get('https://graph.facebook.com/v2.0/fql',
                             params={
                                 'q': fql_query,
                                 'access_token': facebook_user.access_token
                             })
            friend_count = r.json()['data'][0]['friend_count']
            if friend_count:
                self.followers = int(friend_count)

            self.social_data_completed = True
            self.save()
        except Exception:
            # Facebook data is best-effort; ignore any failure and move on.
            pass

        try:
            twitter_user = TwitterUser.objects.get(user=self.user, status=True)
            auth = tweepy.OAuthHandler(settings.TWITTER_CONSUMER_KEY,
                                       settings.TWITTER_CONSUMER_SECRET)
            auth.set_access_token(twitter_user.oauth_token,
                                  twitter_user.oauth_token_secret)
            api = tweepy.API(auth_handler=auth, api_root='/1.1')

            # TW Profile Image
            tweepy_user = api.get_user(screen_name=auth.get_username())
            partition = tweepy_user.profile_image_url.rpartition('_normal')
            if partition[0]:
                profile_image_url = partition[0] + partition[2]
            else:
                profile_image_url = tweepy_user.profile_image_url
            tw_image = urllib.urlretrieve(profile_image_url)
            if tw_image[0]:
                tw_image_contents = File(open(tw_image[0], 'rb'))
                if self.image:
                    self.remove_profile_images()
                self.image.save('profile-image.png',
                                tw_image_contents,
                                save=True)

            # TW Followers Count
            if tweepy_user.followers_count:
                self.followers = int(tweepy_user.followers_count)

            self.social_data_completed = True
            self.save()
        except Exception:
            # Twitter data is likewise best-effort.
            pass
Example #9
    def handle(self, *args, **options):
        # Users
        profile_pics_filepaths = settings.ROOT_DIR.glob("seeds/profile_pics/*")
        users = []
        for index, profile_pics_filepath in enumerate(profile_pics_filepaths):
            with open(profile_pics_filepath, "rb") as f:
                image = File(f, name=profile_pics_filepath.name)
                user = User.objects.create(name=f"User {index}",
                                           username=f"user_{index}",
                                           email=f"user{index}@example.com",
                                           avatar=image)
            user.set_password("12345")
            user.save()
            users.append(user)

        user = User.objects.create(name="User No Image",
                                   username="user_ni",
                                   email="*****@*****.**")
        user.set_password("12345")
        user.save()
        users.append(user)

        # Boards
        for user in users:
            print(user)
            furniture_board = Board.objects.create(name="Furniture",
                                                   description="Description",
                                                   author=user)
            people_board = Board.objects.create(name="People",
                                                description="Description",
                                                author=user)
            places_board = Board.objects.create(name="Places",
                                                description="Description",
                                                author=user)

            # Pins
            furniture_filenames = settings.ROOT_DIR.glob("seeds/furniture/*")
            people_filenames = settings.ROOT_DIR.glob("seeds/people/*")
            places_filenames = settings.ROOT_DIR.glob("seeds/places/*")

            for index, furniture_filename in enumerate(furniture_filenames):
                with open(furniture_filename, "rb") as f:
                    image = File(f, name=furniture_filename.name)
                    pin = Pin.objects.create(
                        title=f"Furniture {index}",
                        description=f"Description {index}",
                        link=f"https://link{index}.com",
                        image=image,
                        author=user)
                    furniture_board.pins.add(pin)
                    furniture_board.save()
                    print(pin)

            for index, people_filename in enumerate(people_filenames):
                with open(people_filename, "rb") as f:
                    image = File(f, name=people_filename.name)
                    pin = Pin.objects.create(
                        title=f"People {index}",
                        description=f"Description {index}",
                        link=f"https://link{index}.com",
                        image=image,
                        author=user)
                    people_board.pins.add(pin)
                    people_board.save()
                    print(pin)

            for index, places_filename in enumerate(places_filenames):
                with open(places_filename, "rb") as f:
                    image = File(f, name=places_filename.name)
                    pin = Pin.objects.create(
                        title=f"Places {index}",
                        description=f"Description {index}",
                        link=f"https://link{index}.com",
                        image=image,
                        author=user)
                    places_board.pins.add(pin)
                    places_board.save()
                    print(pin)
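The three pin-seeding loops differ only in their source directory, title prefix, and target board; a small helper along these lines (a sketch, not part of the original command) would remove the duplication:

def seed_pins(board, glob_pattern, title_prefix, user):
    # Mirrors the loops above; settings, Pin, and File as imported there.
    for index, path in enumerate(settings.ROOT_DIR.glob(glob_pattern)):
        with open(path, "rb") as f:
            pin = Pin.objects.create(
                title=f"{title_prefix} {index}",
                description=f"Description {index}",
                link=f"https://link{index}.com",
                image=File(f, name=path.name),
                author=user)
            board.pins.add(pin)
    board.save()

# e.g. seed_pins(furniture_board, "seeds/furniture/*", "Furniture", user)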
Example #10
    def authenticate(self, request, code):
        def get_access_token(auth_code):
            """
            유저가 페이스북에서 우리 애플리케이션의 사용에 대해 '승인'한 경우,
            페이스북에서 우리 애플리케이션의 주소(redirect_uri)에 'code'라는 GET parameter로 전해주는
            인증 코드 (auth_code)를 사용해서
            페이스북 GraphAPI에 access_token요청, 결과를 가져와 리턴
            :param auth_code: 유저가 페이스북에 로그인/앱 승인한 결과로 돌아오는 'code' GET parameter
            :return:
            """
            redirect_uri = 'http://localhost:8000/facebook-login/'
            # Send a GET request to the endpoint below.
            params_access_token = {
                'client_id': self.CLIENT_ID,
                'redirect_uri': redirect_uri,
                'client_secret': self.CLIENT_SECRET,
                'code': auth_code,
            }
            response = requests.get(self.URL_ACCESS_TOKEN, params_access_token)
            # The response body is JSON text; use the JSON decoder that
            # requests provides to turn it into a Python dict.
            response_dict = response.json()
            return response_dict['access_token']

        def get_user_info(user_access_token):
            """
            User access token을 사용해서
            GraphAPI의 'User'항목을 리턴
                (엔드포인트 'me'를 사용해서 access_token에 해당하는 사용자의 정보를 가져옴)
            :param user_access_token: 정보를 가져올 Facebook User access token
            :return: User정보 (dict)
            """
            params = {
                'access_token':
                user_access_token,
                'fields':
                ','.join([
                    'id',
                    'name',
                    'picture.width(2500)',
                    'first_name',
                    'last_name',
                ])
            }
            response = requests.get(self.URL_ME, params)
            response_dict = response.json()
            return response_dict

        try:
            # If any step of the authentication flow below fails,
            # fall through and return None.
            access_token = get_access_token(code)
            user_info = get_user_info(access_token)

            facebook_id = user_info['id']
            name = user_info['name']
            first_name = user_info['first_name']
            last_name = user_info['last_name']
            url_picture = user_info['picture']['data']['url']

            try:  # Handle users who have not signed up yet.
                user = User.objects.get(username=facebook_id)
            except User.DoesNotExist:
                user = User.objects.create_user(
                    username=facebook_id,
                    first_name=first_name,
                    last_name=last_name,
                    # img_profile=,
                )

                # file_name = '{album_id}.{ext}'.format(
                #     album_id=album_id,
                #     ext=get_buffer_ext(temp_file),
                # )
                # Option 1 - delete the old file and create a new one
                # if album.img_cover:
                #     album.img_cover.delete()

            # Keep the exercise simple: only save when there is no picture yet.
            if not user.img_profile:
                temp_file = download(url_picture)
                ext = get_buffer_ext(temp_file)
                user.img_profile.save(
                    f'{user.pk}-{user.first_name}{user.last_name}.{ext}',
                    File(temp_file))
            return user

            # binary_data = request.get(url_picture)
            # user.img_profile.save(facebook_id, ContentFile(binary_data))

        except Exception:
            return None
Example #11
 def test_noname_file_get_size(self):
     self.assertEqual(File(BytesIO(b'A file with no name')).size, 19)
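The assertion works because File.size falls back to measuring the underlying file object when there is no storage-backed name; a standalone version of the same check (assuming stock Django) is:

from io import BytesIO
from django.core.files.base import File

f = File(BytesIO(b'A file with no name'))
assert f.size == 19  # measured by seeking to the end of the in-memory buffer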
Example #12
	def setUp(self):
		self.user = User.objects.create(username='******')
		self.election = Election.objects.create(name='elec foo', slug='elec-foo', owner=self.user, published=True)
		#Deleting default categories
		for category in self.election.category_set.all():
			category.delete()
		#end of deleting
		f = open(os.path.join(dirname, 'media/dummy.jpg'), 'rb')
		self.addCleanup(f.close)
		self.candidate_one = Candidate.objects.create(name='bar baz', election = self.election, photo=File(f))
		self.candidate_two = Candidate.objects.create(name='foo fii', election = self.election, photo=File(f))
		self.category1, created = Category.objects.get_or_create(name='FooCat',
																election=self.election,
																slug='foo-cat')
		self.category2, created = Category.objects.get_or_create(name='FooCat2',
																election=self.election,
																slug='foo-cat-2')
		self.question1, created = Question.objects.get_or_create(question='FooQuestion',
																category=self.category1)
		self.question2, created = Question.objects.get_or_create(question='BarQuestion',
																category=self.category2)
		self.answer1_1, created = Answer.objects.get_or_create(question=self.question1,
																caption='BarAnswer1Question1')
		self.answer1_2, created = Answer.objects.get_or_create(question=self.question2,
																caption='BarAnswer1Question2')
		self.answer2_1, created = Answer.objects.get_or_create(question=self.question1,
																caption='BarAnswer2uestion1')
		self.answer2_2, created = Answer.objects.get_or_create(question=self.question2,
																caption='BarAnswer2Question2')

		self.candidate_one.associate_answer(self.answer1_1)
		self.candidate_one.associate_answer(self.answer1_2)
		self.candidate_two.associate_answer(self.answer2_1)
		self.candidate_two.associate_answer(self.answer2_2)
Example #13
 def 存壓縮檔(self, tar路徑):
     # "Save archive": store the tarball at tar路徑 on the 壓縮檔 FileField.
     with open(tar路徑, 'rb') as tar檔案:
         self.壓縮檔.save(name='壓縮檔{0:07}.tar.gz'.format(self.影音.編號()),
                       content=File(tar檔案),
                       save=True)
Example #14
 def assign_report_table(self, filename):
     # Replace any existing report table with the file at ``filename``.
     try:
         self.report_table.delete()
     except Exception:
         # The previous file may not exist; a failed delete is not fatal.
         pass
     self.report_table = File(open(filename, 'rb'))
Example #15
 def test_save(self, mock_ftp):
     self.storage._save('foo', File(io.BytesIO(b'foo'), 'foo'))
Example #16
 def setUp(self):
     super().setUp()
     self.user = User.objects.get(username="******")
     Picture.objects.create(user=self.user,
                            title="Picture One",
                            # Wrap the empty payload in a real file object so
                            # File has something to read; the name is arbitrary.
                            cover=File(BytesIO(b""), name="picture.png"))
Example #17
    def handle(self, *args, **options):
        all_sets = CardSet.objects.all()

        if len(all_sets) == 0:
            raise CommandError(NO_CARDSETS)

        if len(args) == 1:
            sets = CardSet.objects.filter(name__icontains=args[0],
                country="GB")
            if len(sets) < 1:
                raise CommandError(NO_SETS_FOUND.format(args[0],
                    "\n".join([str(cs) for cs in all_sets])))
        else:
            sets = CardSet.objects.filter(country="GB")

        self.stdout.write("found card sets {0}\n".format(
            ", ".join([str(cs) for cs in sets])))
        self.stdout.write("Started Scrape command\n")

        card_no = re.compile(r"\s*(?P<card_no>\d+)/(?P<count>\d+)\s*")
        card_list_h2 = re.compile("((C|c)ard (L|l)ist(s)*)|(Setlist)")
        energy_type = re.compile(
            r"(?P<energy_type>\w+) Energy \(((TCG)|(Basic))\)")

        for cs in sets:
            self.stdout.write("Processing '{0}'\n".format(cs.name))

            if cs.partial_url is None or cs.partial_url == "":
                raise CommandError("{0} does not have a valid URL".format(cs))

            html = json.load(urlopen(
                BASE_URL.format(
                    API_URL.format(
                        urlquote(cs.partial_url)))))['parse']['text']['*']
            try:
                h2 = next(node
                          for node in BeautifulSoup(html).find_all("h2")
                          if node.find("span", "mw-headline") is not None and
                              node.find(text=card_list_h2) is not None)
                rows = next(node.find_all("tr")
                            for node in h2.next_siblings
                            if not isinstance(node, NavigableString) and
                                node.find("b") is not None and
                                node.find("b").find(text=cs.name) is not None)
            except StopIteration:
                self.stdout.write(
                    "'{0}' does not have any valid cards\n".format(cs.name))
                continue

            cs.card_set.all().delete()

            for tr in rows:
                td = tr.find("td")
                if td is not None and td != -1:
                    match = card_no.match(td.text)
                    if match is not None \
                        and int(match.group("count")) == cs.official_count:

                        node = td.next_sibling.next_sibling
                        name_node = node.next_sibling.next_sibling
                        type_node = name_node.next_sibling.next_sibling
                        rarity_node = type_node.next_sibling.next_sibling

                        if rarity_node.a is None and rarity_node.a != -1:
                            rarity_name = "None"
                        else:
                            rarity_name = rarity_node.a['title'].strip()

                        rarity, created = Rarity.objects.get_or_create(
                            name=rarity_name)

                        if created and rarity_node.a is not None \
                            and rarity_node.a.img is not None:

                            logo_temp = NamedTemporaryFile()
                            rarity_url = rarity_node.a.img['src']
                            logo_temp.write(urlopen(rarity_url).read())
                            logo_ext = urlparse(rarity_url).path.split('.')[-1]
                            logo_filename="{0}.{1}".format(str(rarity.id),
                                logo_ext)
                            logo_temp.flush()
                            rarity.logo.save(logo_filename, File(logo_temp))

                        if type_node.a is not None and type_node.a != -1:
                            card_type_name = type_node.a['title'].strip()
                            t_match = energy_type.match(card_type_name)
                            if t_match is not None:
                                card_type_name = t_match.group("energy_type")
                        elif type_node.img is not None and type_node.img != -1 \
                            and type_node.img['alt'] == "Dragon-attack.png":
                            card_type_name = "Dragon"
                        else:
                            try:
                                card_type_name = CARD_TYPE_MAP[
                                    type_node.text.strip()]
                            except KeyError:
                                self.stderr.write(
                                    "Unrecognised type {0}".format(
                                        str(type_node)))
                                # card_type_name would be undefined below,
                                # so skip this card row.
                                continue

                        card_type, created = CardType.objects.get_or_create(
                            name=card_type_name)

                        if created and type_node.a is not None \
                            and type_node.a.img is not None:

                            logo_temp = NamedTemporaryFile()
                            card_type_url = type_node.a.img['src']
                            logo_temp.write(urlopen(card_type_url).read())
                            logo_ext = urlparse(card_type_url
                                ).path.split('.')[-1]
                            logo_filename="{0}.{1}".format(str(card_type.id),
                                    logo_ext)
                            logo_temp.flush()
                            card_type.logo.save(logo_filename, File(logo_temp))

                        card = Card(card_no=match.group("card_no"),
                            card_set=cs,
                            name=name_node.text.encode('utf-8').strip(),
                            card_type=card_type, rarity=rarity)

                        if name_node.a is not None and name_node.a != -1:
                            card.url = BASE_URL.format(name_node.a['href'][1:])

                        card.save()
                        self.stdout.write("{0}/{1} - {2} ({3})\n".format(
                            str(card.card_no), str(cs.official_count),
                            card.name, cs.name))

            self.stdout.write("total cards {0}\n".format(
                str(cs.card_set.all().count())))
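The command leans on a CARD_TYPE_MAP constant defined elsewhere in the project; it is presumably a plain name-normalisation dict along these lines (the entries here are illustrative assumptions, not the real mapping):

CARD_TYPE_MAP = {
    "Grass": "Grass",
    "Trainer card": "Trainer",
    "Supporter card": "Supporter",
}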
Example #18
def test_algorithm_multiple_inputs(client, algorithm_io_image, settings,
                                   component_interfaces):
    # Override the celery settings
    settings.task_eager_propagates = (True, )
    settings.task_always_eager = (True, )

    creator = UserFactory()

    assert Job.objects.count() == 0

    # Create the algorithm image
    algorithm_container, sha256 = algorithm_io_image
    alg = AlgorithmImageFactory(image__from_path=algorithm_container,
                                image_sha256=sha256,
                                ready=True)
    alg.algorithm.add_editor(creator)

    alg.algorithm.inputs.set(ComponentInterface.objects.all())
    alg.algorithm.outputs.set(
        [ComponentInterface.objects.get(slug="results-json-file")])
    # create the job
    job = Job.objects.create(creator=creator, algorithm_image=alg)

    expected = []
    for ci in ComponentInterface.objects.exclude(
            kind=InterfaceKindChoices.ZIP):
        if ci.is_image_kind:
            image_file = ImageFileFactory(
                file__from_path=Path(__file__).parent / "resources" /
                "input_file.tif")
            job.inputs.add(
                ComponentInterfaceValueFactory(interface=ci,
                                               image=image_file.image))
            expected.append("file")
        elif ci.is_file_kind:
            civ = ComponentInterfaceValueFactory(interface=ci)
            civ.file.save("test", File(BytesIO(b"")))
            civ.save()
            job.inputs.add(civ)
            expected.append("file")
        else:
            job.inputs.add(
                ComponentInterfaceValueFactory(interface=ci, value="test"))
            expected.append("test")

    with capture_on_commit_callbacks() as callbacks:
        run_algorithm_job_for_inputs(job_pk=job.pk, upload_pks=[])
    recurse_callbacks(callbacks=callbacks)

    job.refresh_from_db()
    assert job.error_message == ""
    assert job.status == job.SUCCESS

    # Remove fake value for score
    output_dict = job.outputs.first().value
    output_dict.pop("score")

    assert {f"/input/{x.relative_path}"
            for x in job.inputs.all()} == set(output_dict.keys())
    assert sorted(map(lambda x: x if x != {} else "json",
                      output_dict.values())) == sorted(expected)
Example #19
 def test_save(self, *args):
     mocked_popen = args[0]  # the patched process object from the decorator
     mocked_popen.return_value.__enter__.return_value.communicate.return_value = RCLONE_EMPTY
     mocked_popen.return_value.__enter__.return_value.returncode = SUCCESS
     self.storage._save('foo.txt', File(io.BytesIO(b'bar'), 'foo.txt'))
     self.assertTrue(mocked_popen.called)
Example #20
 def _set_file(self, value):
     if not isinstance(value, File):
         value = File(value)
     self._file = value
     self._committed = False
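This setter is one half of a file property descriptor; in Django's own FieldFile the pairing looks roughly like this (simplified from django.db.models.fields.files, so treat it as a sketch):

 def _get_file(self):
     self._require_file()
     if getattr(self, '_file', None) is None:
         self._file = self.storage.open(self.name, 'rb')
     return self._file

 file = property(_get_file, _set_file)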
Example #21
 def test_save_in_subdir(self, mock_sftp):
     self.storage._save('bar/foo', File(BytesIO(b'foo'), 'foo'))
     self.assertEqual(mock_sftp.mkdir.call_args_list[0][0], ('bar', ))
     self.assertTrue(mock_sftp.open.return_value.write.called)
Example #22
def plei_thumbnail(image_url, width, height, quality=95):
    """
    Given the URL to an image, resizes the image using the given width and
    height on the first time it is requested, and returns the URL to the new
    resized image.

    Aspect ratio is always preserved, so if width/height do not match the
    original aspect ratio, the image is resized so that one side equals the
    target dimension and the other ends up larger.

    This is useful when we need to fill a rectangle with a resized image
    and the source images have unpredictable aspect ratios.
    """
    if not image_url:
        return ""

    image_url = unquote(unicode(image_url))
    if image_url.startswith(settings.MEDIA_URL):
        image_url = image_url.replace(settings.MEDIA_URL, "", 1)
    image_dir, image_name = os.path.split(image_url)
    image_prefix, image_ext = os.path.splitext(image_name)
    filetype = {".png": "PNG", ".gif": "GIF"}.get(image_ext, "JPEG")
    thumb_name = "%s-ar-%sx%s%s" % (image_prefix, width, height, image_ext)
    thumb_dir = os.path.join(settings.MEDIA_ROOT, image_dir,
                             settings.THUMBNAILS_DIR_NAME)
    if not os.path.exists(thumb_dir):
        os.makedirs(thumb_dir)
    thumb_path = os.path.join(thumb_dir, thumb_name)
    thumb_url = "%s/%s" % (settings.THUMBNAILS_DIR_NAME,
                           quote(thumb_name.encode("utf-8")))
    image_url_path = os.path.dirname(image_url)
    if image_url_path:
        thumb_url = "%s/%s" % (image_url_path, thumb_url)

    try:
        thumb_exists = os.path.exists(thumb_path)
    except UnicodeEncodeError:
        # The image that was saved to a filesystem with utf-8 support,
        # but somehow the locale has changed and the filesystem does not
        # support utf-8.
        from mezzanine.core.exceptions import FileSystemEncodingChanged
        raise FileSystemEncodingChanged()
    if thumb_exists:
        # Thumbnail exists, don't generate it.
        return thumb_url
    elif not default_storage.exists(image_url):
        # Requested image does not exist, just return its URL.
        return image_url

    f = default_storage.open(image_url)
    try:
        image = Image.open(f)
    except Exception:
        # Invalid image format.
        return image_url

    image_info = image.info
    width = int(width)
    height = int(height)

    # If already right size, don't do anything.
    if width == image.size[0] or height == image.size[1]:
        return image_url
    # Set dimensions.
    if width and height:
        # Both target dimensions were specified; drop whichever one the
        # source image is proportionally smaller in, so the remaining
        # dimension drives an aspect-preserving resize.
        if float(height) / width < float(image.size[1]) / image.size[0]:
            # The original image is tall and slim: resize by width only.
            height = 0
        else:
            # The original image is wide and short: resize by height only.
            width = 0
    if not width:
        width = int(round(float(image.size[0]) * height / image.size[1]))
    elif not height:
        height = int(round(float(image.size[1]) * width / image.size[0]))

    if image.mode not in ("P", "L", "RGBA"):
        image = image.convert("RGBA")
    # Required for progressive jpgs.
    ImageFile.MAXBLOCK = image.size[0] * image.size[1]
    try:
        image = ImageOps.fit(image, (width, height), Image.ANTIALIAS)
        # Image.save() returns None, so keep the reference intact.
        image.save(thumb_path, filetype, quality=quality, **image_info)
        # Push a remote copy of the thumbnail if MEDIA_URL is
        # absolute.
        if "://" in settings.MEDIA_URL:
            with open(thumb_path, "r") as f:
                default_storage.save(thumb_url, File(f))
    except Exception:
        # If an error occurred, a corrupted image may have been saved,
        # so remove it, otherwise the check for it existing will just
        # return the corrupted image next time it's requested.
        try:
            os.remove(thumb_path)
        except Exception:
            pass
        return image_url
    return thumb_url
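As used from a view or template helper, the function returns a URL that can go straight into an <img> tag; a hypothetical call:

url = plei_thumbnail("uploads/photo.jpg", 400, 300)
# Returns e.g. "uploads/<THUMBNAILS_DIR_NAME>/photo-ar-400x300.jpg" relative
# to MEDIA_URL, or the original URL if the source is missing or unreadable.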
Example #23
    def handle(self, *args: Any, **options: Any) -> None:
        # Suppress spammy output from the push notifications logger
        push_notifications_logger.disabled = True

        if options["percent_huddles"] + options["percent_personals"] > 100:
            self.stderr.write("Error!  More than 100% of messages allocated.\n")
            return

        # Get consistent data for backend tests.
        if options["test_suite"]:
            random.seed(0)

            with connection.cursor() as cursor:
                # Sometimes bugs relating to confusing recipient.id for recipient.type_id
                # or <object>.id for <object>.recipient_id remain undiscovered by the test suite
                # due to these numbers happening to coincide in such a way that it makes tests
                # accidentally pass. By bumping the Recipient.id sequence by a large enough number,
                # we can have those ids in a completely different range of values than object ids,
                # eliminating the possibility of such coincidences.
                cursor.execute("SELECT setval('zerver_recipient_id_seq', 100)")

        if options["max_topics"] is None:
            # If max_topics is not set, we use a default that's big
            # enough that "more topics" should appear, and that scales
            # slowly with the number of messages.
            options["max_topics"] = 8 + options["num_messages"] // 1000

        if options["delete"]:
            # Start by clearing all the data in our database
            clear_database()

            # Create our three default realms
            # Could in theory be done via zerver.actions.create_realm.do_create_realm, but
            # welcome-bot (needed for do_create_realm) hasn't been created yet
            create_internal_realm()
            zulip_realm = do_create_realm(
                string_id="zulip",
                name="Zulip Dev",
                emails_restricted_to_domains=False,
                email_address_visibility=Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS,
                description="The Zulip development environment default organization."
                "  It's great for testing!",
                invite_required=False,
                plan_type=Realm.PLAN_TYPE_SELF_HOSTED,
                org_type=Realm.ORG_TYPES["business"]["id"],
                enable_spectator_access=True,
            )
            RealmDomain.objects.create(realm=zulip_realm, domain="zulip.com")
            assert zulip_realm.notifications_stream is not None
            zulip_realm.notifications_stream.name = "Verona"
            zulip_realm.notifications_stream.description = "A city in Italy"
            zulip_realm.notifications_stream.save(update_fields=["name", "description"])

            realm_user_default = RealmUserDefault.objects.get(realm=zulip_realm)
            realm_user_default.enter_sends = True
            realm_user_default.save()

            if options["test_suite"]:
                mit_realm = do_create_realm(
                    string_id="zephyr",
                    name="MIT",
                    emails_restricted_to_domains=True,
                    invite_required=False,
                    plan_type=Realm.PLAN_TYPE_SELF_HOSTED,
                    org_type=Realm.ORG_TYPES["business"]["id"],
                )
                RealmDomain.objects.create(realm=mit_realm, domain="mit.edu")

                lear_realm = do_create_realm(
                    string_id="lear",
                    name="Lear & Co.",
                    emails_restricted_to_domains=False,
                    invite_required=False,
                    plan_type=Realm.PLAN_TYPE_SELF_HOSTED,
                    org_type=Realm.ORG_TYPES["business"]["id"],
                )

                # Default to allowing all members to send mentions in
                # large streams for the test suite to keep
                # mention-related tests simple.
                zulip_realm.wildcard_mention_policy = Realm.WILDCARD_MENTION_POLICY_MEMBERS
                zulip_realm.save(update_fields=["wildcard_mention_policy"])

            # Create test Users (UserProfiles are automatically created,
            # as are subscriptions to the ability to receive personals).
            names = [
                ("Zoe", "*****@*****.**"),
                ("Othello, the Moor of Venice", "*****@*****.**"),
                ("Iago", "*****@*****.**"),
                ("Prospero from The Tempest", "*****@*****.**"),
                ("Cordelia, Lear's daughter", "*****@*****.**"),
                ("King Hamlet", "*****@*****.**"),
                ("aaron", "*****@*****.**"),
                ("Polonius", "*****@*****.**"),
                ("Desdemona", "*****@*****.**"),
                ("शिव", "*****@*****.**"),
            ]

            # For testing really large batches:
            # Create extra users with semi realistic names to make search
            # functions somewhat realistic.  We'll still create 1000 users
            # like Extra222 User for some predictability.
            num_names = options["extra_users"]
            num_boring_names = 300

            for i in range(min(num_names, num_boring_names)):
                full_name = f"Extra{i:03} User"
                names.append((full_name, f"extrauser{i}@zulip.com"))

            if num_names > num_boring_names:
                fnames = [
                    "Amber",
                    "Arpita",
                    "Bob",
                    "Cindy",
                    "Daniela",
                    "Dan",
                    "Dinesh",
                    "Faye",
                    "François",
                    "George",
                    "Hank",
                    "Irene",
                    "James",
                    "Janice",
                    "Jenny",
                    "Jill",
                    "John",
                    "Kate",
                    "Katelyn",
                    "Kobe",
                    "Lexi",
                    "Manish",
                    "Mark",
                    "Matt",
                    "Mayna",
                    "Michael",
                    "Pete",
                    "Peter",
                    "Phil",
                    "Phillipa",
                    "Preston",
                    "Sally",
                    "Scott",
                    "Sandra",
                    "Steve",
                    "Stephanie",
                    "Vera",
                ]
                mnames = ["de", "van", "von", "Shaw", "T."]
                lnames = [
                    "Adams",
                    "Agarwal",
                    "Beal",
                    "Benson",
                    "Bonita",
                    "Davis",
                    "George",
                    "Harden",
                    "James",
                    "Jones",
                    "Johnson",
                    "Jordan",
                    "Lee",
                    "Leonard",
                    "Singh",
                    "Smith",
                    "Patel",
                    "Towns",
                    "Wall",
                ]
                non_ascii_names = [
                    "Günter",
                    "أحمد",
                    "Magnús",
                    "आशी",
                    "イツキ",
                    "语嫣",
                    "அருண்",
                    "Александр",
                    "José",
                ]
                # to imitate emoji insertions in usernames
                raw_emojis = ["😎", "😂", "🐱‍👤"]

            for i in range(num_boring_names, num_names):
                fname = random.choice(fnames) + str(i)
                full_name = fname
                if random.random() < 0.7:
                    if random.random() < 0.3:
                        full_name += " " + random.choice(non_ascii_names)
                    else:
                        full_name += " " + random.choice(mnames)
                    if random.random() < 0.1:
                        full_name += f" {random.choice(raw_emojis)} "
                    else:
                        full_name += " " + random.choice(lnames)
                email = fname.lower() + "@zulip.com"
                names.append((full_name, email))

            create_users(zulip_realm, names, tos_version=settings.TERMS_OF_SERVICE_VERSION)

            # Add time zones to some users. Ideally, this would be
            # done in the initial create_users calls, but the
            # tuple-based interface for that function doesn't support
            # doing so.
            def assign_time_zone_by_delivery_email(delivery_email: str, new_time_zone: str) -> None:
                u = get_user_by_delivery_email(delivery_email, zulip_realm)
                u.timezone = new_time_zone
                u.save(update_fields=["timezone"])

            # Note: Hamlet keeps default time zone of "".
            assign_time_zone_by_delivery_email("*****@*****.**", "US/Pacific")
            assign_time_zone_by_delivery_email("*****@*****.**", "US/Pacific")
            assign_time_zone_by_delivery_email("*****@*****.**", "US/Eastern")
            assign_time_zone_by_delivery_email("*****@*****.**", "US/Eastern")
            assign_time_zone_by_delivery_email("*****@*****.**", "Canada/Newfoundland")
            assign_time_zone_by_delivery_email("*****@*****.**", "Asia/Shanghai")  # China
            assign_time_zone_by_delivery_email("*****@*****.**", "Asia/Kolkata")  # India
            assign_time_zone_by_delivery_email("*****@*****.**", "UTC")

            users = UserProfile.objects.filter(realm=zulip_realm)
            # All users in development environment are full members initially because
            # waiting period threshold is 0. Groups of Iago, Dedemona, Shiva and
            # Polonius will be updated according to their role in do_change_user_role.
            full_members_user_group = UserGroup.objects.get(
                realm=zulip_realm, name="@role:fullmembers", is_system_group=True
            )
            members_user_group = UserGroup.objects.get(
                realm=zulip_realm, name="@role:members", is_system_group=True
            )
            user_group_memberships = []
            for user_profile in list(users):
                for group in [full_members_user_group, members_user_group]:
                    user_group_membership = UserGroupMembership(
                        user_group=group, user_profile=user_profile
                    )
                    user_group_memberships.append(user_group_membership)
            UserGroupMembership.objects.bulk_create(user_group_memberships)

            iago = get_user_by_delivery_email("*****@*****.**", zulip_realm)
            do_change_user_role(iago, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
            iago.is_staff = True
            iago.save(update_fields=["is_staff"])

            # We need to create at least two test drafts for Iago for the sake
            # of the cURL tests. Two since one will be deleted.
            Draft.objects.create(
                user_profile=iago,
                recipient=None,
                topic="Release Notes",
                content="Release 4.0 will contain ...",
                last_edit_time=timezone_now(),
            )
            Draft.objects.create(
                user_profile=iago,
                recipient=None,
                topic="Release Notes",
                content="Release 4.0 will contain many new features such as ... ",
                last_edit_time=timezone_now(),
            )

            desdemona = get_user_by_delivery_email("*****@*****.**", zulip_realm)
            do_change_user_role(desdemona, UserProfile.ROLE_REALM_OWNER, acting_user=None)

            shiva = get_user_by_delivery_email("*****@*****.**", zulip_realm)
            do_change_user_role(shiva, UserProfile.ROLE_MODERATOR, acting_user=None)

            polonius = get_user_by_delivery_email("*****@*****.**", zulip_realm)
            do_change_user_role(polonius, UserProfile.ROLE_GUEST, acting_user=None)

            # These bots are directly referenced from code and thus
            # are needed for the test suite.
            zulip_realm_bots = [
                ("Zulip Error Bot", "*****@*****.**"),
                ("Zulip Default Bot", "*****@*****.**"),
            ]
            for i in range(options["extra_bots"]):
                zulip_realm_bots.append((f"Extra Bot {i}", f"extrabot{i}@zulip.com"))

            create_users(
                zulip_realm, zulip_realm_bots, bot_type=UserProfile.DEFAULT_BOT, bot_owner=desdemona
            )

            zoe = get_user_by_delivery_email("*****@*****.**", zulip_realm)
            zulip_webhook_bots = [
                ("Zulip Webhook Bot", "*****@*****.**"),
            ]
            # If a stream is not supplied in the webhook URL, the webhook
            # will (in some cases) send the notification as a PM to the
            # owner of the webhook bot, so bot_owner can't be None
            create_users(
                zulip_realm,
                zulip_webhook_bots,
                bot_type=UserProfile.INCOMING_WEBHOOK_BOT,
                bot_owner=zoe,
            )
            aaron = get_user_by_delivery_email("*****@*****.**", zulip_realm)

            zulip_outgoing_bots = [
                ("Outgoing Webhook", "*****@*****.**"),
            ]
            create_users(
                zulip_realm,
                zulip_outgoing_bots,
                bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
                bot_owner=aaron,
            )
            outgoing_webhook = get_user("*****@*****.**", zulip_realm)
            add_service(
                "outgoing-webhook",
                user_profile=outgoing_webhook,
                interface=Service.GENERIC,
                base_url="http://127.0.0.1:5002",
                token=generate_api_key(),
            )

            # Add the realm internal bots to each realm.
            create_if_missing_realm_internal_bots()

            # Create public streams.
            signups_stream = Realm.INITIAL_PRIVATE_STREAM_NAME

            stream_list = [
                "Verona",
                "Denmark",
                "Scotland",
                "Venice",
                "Rome",
                signups_stream,
            ]
            stream_dict: Dict[str, Dict[str, Any]] = {
                "Denmark": {"description": "A Scandinavian country"},
                "Scotland": {"description": "Located in the United Kingdom"},
                "Venice": {"description": "A northeastern Italian city"},
                "Rome": {"description": "Yet another Italian city", "is_web_public": True},
            }

            bulk_create_streams(zulip_realm, stream_dict)
            recipient_streams: List[int] = [
                Stream.objects.get(name=name, realm=zulip_realm).id for name in stream_list
            ]

            # Create subscriptions to streams.  The following
            # algorithm will give each of the users a different but
            # deterministic subset of the streams (given a fixed list
            # of users). For the test suite, we have a fixed list of
            # subscriptions to make sure test data is consistent
            # across platforms.
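            #
            # For illustration: with 6 streams and 10 users, the
            # non-test-suite branch below subscribes user index i to the
            # first int(6 * i / 10) + 1 streams, so user 0 gets 1 stream
            # and user 9 gets all 6.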

            subscriptions_list: List[Tuple[UserProfile, Recipient]] = []
            profiles: Sequence[UserProfile] = list(
                UserProfile.objects.select_related().filter(is_bot=False).order_by("email")
            )

            if options["test_suite"]:
                subscriptions_map = {
                    "*****@*****.**": ["Verona"],
                    "*****@*****.**": ["Verona"],
                    "*****@*****.**": ["Verona", "Denmark", signups_stream],
                    "*****@*****.**": [
                        "Verona",
                        "Denmark",
                        "Scotland",
                        signups_stream,
                    ],
                    "*****@*****.**": ["Verona", "Denmark", "Scotland"],
                    "*****@*****.**": ["Verona", "Denmark", "Scotland", "Venice"],
                    "*****@*****.**": ["Verona", "Denmark", "Scotland", "Venice", "Rome"],
                    "*****@*****.**": ["Verona"],
                    "*****@*****.**": [
                        "Verona",
                        "Denmark",
                        "Venice",
                        signups_stream,
                    ],
                    "*****@*****.**": ["Verona", "Denmark", "Scotland"],
                }

                for profile in profiles:
                    email = profile.delivery_email
                    if email not in subscriptions_map:
                        raise Exception(f"Subscriptions not listed for user {email}")

                    for stream_name in subscriptions_map[email]:
                        stream = Stream.objects.get(name=stream_name, realm=zulip_realm)
                        r = Recipient.objects.get(type=Recipient.STREAM, type_id=stream.id)
                        subscriptions_list.append((profile, r))
            else:
                num_streams = len(recipient_streams)
                num_users = len(profiles)
                for i, profile in enumerate(profiles):
                    # Subscribe to some streams.
                    fraction = float(i) / num_users
                    num_recips = int(num_streams * fraction) + 1

                    for type_id in recipient_streams[:num_recips]:
                        r = Recipient.objects.get(type=Recipient.STREAM, type_id=type_id)
                        subscriptions_list.append((profile, r))

            subscriptions_to_add: List[Subscription] = []
            event_time = timezone_now()
            all_subscription_logs: List[RealmAuditLog] = []

            # Cycle deterministically through the stream color palette.
            for i, (profile, recipient) in enumerate(subscriptions_list, start=1):
                color = STREAM_ASSIGNMENT_COLORS[i % len(STREAM_ASSIGNMENT_COLORS)]
                s = Subscription(
                    recipient=recipient,
                    user_profile=profile,
                    is_user_active=profile.is_active,
                    color=color,
                )

                subscriptions_to_add.append(s)

                log = RealmAuditLog(
                    realm=profile.realm,
                    modified_user=profile,
                    modified_stream_id=recipient.type_id,
                    event_last_message_id=0,
                    event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
                    event_time=event_time,
                )
                all_subscription_logs.append(log)

            Subscription.objects.bulk_create(subscriptions_to_add)
            RealmAuditLog.objects.bulk_create(all_subscription_logs)

            # Create custom profile field data
            phone_number = try_add_realm_custom_profile_field(
                zulip_realm, "Phone number", CustomProfileField.SHORT_TEXT, hint=""
            )
            biography = try_add_realm_custom_profile_field(
                zulip_realm,
                "Biography",
                CustomProfileField.LONG_TEXT,
                hint="What are you known for?",
            )
            favorite_food = try_add_realm_custom_profile_field(
                zulip_realm,
                "Favorite food",
                CustomProfileField.SHORT_TEXT,
                hint="Or drink, if you'd prefer",
            )
            field_data: ProfileFieldData = {
                "vim": {"text": "Vim", "order": "1"},
                "emacs": {"text": "Emacs", "order": "2"},
            }
            favorite_editor = try_add_realm_custom_profile_field(
                zulip_realm, "Favorite editor", CustomProfileField.SELECT, field_data=field_data
            )
            birthday = try_add_realm_custom_profile_field(
                zulip_realm, "Birthday", CustomProfileField.DATE
            )
            favorite_website = try_add_realm_custom_profile_field(
                zulip_realm,
                "Favorite website",
                CustomProfileField.URL,
                hint="Or your personal blog's URL",
            )
            mentor = try_add_realm_custom_profile_field(
                zulip_realm, "Mentor", CustomProfileField.USER
            )
            github_profile = try_add_realm_default_custom_profile_field(zulip_realm, "github")

            # Fill in values for Iago and Hamlet
            hamlet = get_user_by_delivery_email("*****@*****.**", zulip_realm)
            do_update_user_custom_profile_data_if_changed(
                iago,
                [
                    {"id": phone_number.id, "value": "+1-234-567-8901"},
                    {"id": biography.id, "value": "Betrayer of Othello."},
                    {"id": favorite_food.id, "value": "Apples"},
                    {"id": favorite_editor.id, "value": "emacs"},
                    {"id": birthday.id, "value": "2000-01-01"},
                    {"id": favorite_website.id, "value": "https://zulip.readthedocs.io/en/latest/"},
                    {"id": mentor.id, "value": [hamlet.id]},
                    {"id": github_profile.id, "value": "zulip"},
                ],
            )
            do_update_user_custom_profile_data_if_changed(
                hamlet,
                [
                    {"id": phone_number.id, "value": "+0-11-23-456-7890"},
                    {
                        "id": biography.id,
                        "value": "I am:\n* The prince of Denmark\n* Nephew to the usurping Claudius",
                    },
                    {"id": favorite_food.id, "value": "Dark chocolate"},
                    {"id": favorite_editor.id, "value": "vim"},
                    {"id": birthday.id, "value": "1900-01-01"},
                    {"id": favorite_website.id, "value": "https://blog.zulig.org"},
                    {"id": mentor.id, "value": [iago.id]},
                    {"id": github_profile.id, "value": "zulipbot"},
                ],
            )
        else:
            zulip_realm = get_realm("zulip")
            recipient_streams = [
                klass.type_id for klass in Recipient.objects.filter(type=Recipient.STREAM)
            ]

        # Extract a list of all users
        user_profiles: List[UserProfile] = list(UserProfile.objects.filter(is_bot=False))

        # Create a test realm emoji.
        IMAGE_FILE_PATH = static_path("images/test-images/checkbox.png")
        with open(IMAGE_FILE_PATH, "rb") as fp:
            check_add_realm_emoji(zulip_realm, "green_tick", iago, File(fp))

        if not options["test_suite"]:
            # Populate users with some presence data.
            for user in user_profiles:
                status: int = UserPresence.ACTIVE
                date = timezone_now()
                client = get_client("website")
                if user.full_name[0] <= "H":
                    client = get_client("ZulipAndroid")
                UserPresence.objects.get_or_create(
                    user_profile=user,
                    realm_id=user.realm_id,
                    client=client,
                    timestamp=date,
                    status=status,
                )

        user_profiles_ids = [user_profile.id for user_profile in user_profiles]

        # Create several initial huddles
        for i in range(options["num_huddles"]):
            get_huddle(random.sample(user_profiles_ids, random.randint(3, 4)))
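        # For example, num_huddles=10 draws ten random sets of 3-4 user ids;
        # get_huddle presumably deduplicates on the user-id set, so repeated
        # draws of the same set yield the same huddle.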

        # Create several initial pairs for personals
        personals_pairs = [
            random.sample(user_profiles_ids, 2) for i in range(options["num_personals"])
        ]

        create_alert_words(zulip_realm.id)

        # Generate a new set of test data.
        create_test_data()

        if options["delete"]:
            if options["test_suite"]:
                # Create test users; the MIT ones are needed to test
                # the Zephyr mirroring codepaths.
                event_time = timezone_now()
                testsuite_mit_users = [
                    ("Fred Sipb (MIT)", "*****@*****.**"),
                    ("Athena Consulting Exchange User (MIT)", "*****@*****.**"),
                    ("Esp Classroom (MIT)", "*****@*****.**"),
                ]
                create_users(
                    mit_realm, testsuite_mit_users, tos_version=settings.TERMS_OF_SERVICE_VERSION
                )

                mit_user = get_user_by_delivery_email("*****@*****.**", mit_realm)
                mit_signup_stream = Stream.objects.get(
                    name=Realm.INITIAL_PRIVATE_STREAM_NAME, realm=mit_realm
                )
                bulk_add_subscriptions(mit_realm, [mit_signup_stream], [mit_user], acting_user=None)

                testsuite_lear_users = [
                    ("King Lear", "*****@*****.**"),
                    ("Cordelia, Lear's daughter", "*****@*****.**"),
                ]
                create_users(
                    lear_realm, testsuite_lear_users, tos_version=settings.TERMS_OF_SERVICE_VERSION
                )

                lear_user = get_user_by_delivery_email("*****@*****.**", lear_realm)
                lear_signup_stream = Stream.objects.get(
                    name=Realm.INITIAL_PRIVATE_STREAM_NAME, realm=lear_realm
                )
                bulk_add_subscriptions(
                    lear_realm, [lear_signup_stream], [lear_user], acting_user=None
                )

            if not options["test_suite"]:
                # To keep the messages.json fixture file for the test
                # suite fast, don't add these users and subscriptions
                # when running populate_db for the test suite.

                # to imitate emoji insertions in stream names
                raw_emojis = ["😎", "😂", "🐱‍👤"]

                zulip_stream_dict: Dict[str, Dict[str, Any]] = {
                    "devel": {"description": "For developing"},
                    # ビデオゲーム - Video games (Japanese)
                    "ビデオゲーム": {"description": f"Share your favorite video games!  {raw_emojis[2]}"},
                    "announce": {
                        "description": "For announcements",
                        "stream_post_policy": Stream.STREAM_POST_POLICY_ADMINS,
                    },
                    "design": {"description": "For design"},
                    "support": {"description": "For support"},
                    "social": {"description": "For socializing"},
                    "test": {"description": "For testing `code`"},
                    "errors": {"description": "For errors"},
                    # 조리법 - Recipes (Korean), Пельмени - Dumplings (Russian)
                    "조리법 "
                    + raw_emojis[0]: {"description": "Everything cooking, from pasta to Пельмени"},
                }

                extra_stream_names = [
                    "802.11a",
                    "Ad Hoc Network",
                    "Augmented Reality",
                    "Cycling",
                    "DPI",
                    "FAQ",
                    "FiFo",
                    "commits",
                    "Control panel",
                    "desktop",
                    "компьютеры",
                    "Data security",
                    "desktop",
                    "काम",
                    "discussions",
                    "Cloud storage",
                    "GCI",
                    "Vaporware",
                    "Recent Trends",
                    "issues",
                    "live",
                    "Health",
                    "mobile",
                    "空間",
                    "provision",
                    "hidrógeno",
                    "HR",
                    "アニメ",
                ]

                # Add stream names and stream descriptions
                for i in range(options["extra_streams"]):
                    extra_stream_name = random.choice(extra_stream_names) + " " + str(i)

                    # to imitate emoji insertions in stream names
                    if random.random() <= 0.15:
                        extra_stream_name += random.choice(raw_emojis)

                    zulip_stream_dict[extra_stream_name] = {
                        "description": "Auto-generated extra stream.",
                    }

                bulk_create_streams(zulip_realm, zulip_stream_dict)
                # Now that we've created the notifications stream, configure it properly.
                zulip_realm.notifications_stream = get_stream("announce", zulip_realm)
                zulip_realm.save(update_fields=["notifications_stream"])

                # Add a few default streams
                for default_stream_name in ["design", "devel", "social", "support"]:
                    DefaultStream.objects.create(
                        realm=zulip_realm, stream=get_stream(default_stream_name, zulip_realm)
                    )

                # Now subscribe everyone to these streams
                subscribe_users_to_streams(zulip_realm, zulip_stream_dict)

            create_user_groups()

            if not options["test_suite"]:
                # We populate the analytics database here for
                # development purposes only.
                call_command("populate_analytics_db")

        threads = options["threads"]
        jobs: List[Tuple[int, List[List[int]], Dict[str, Any], int]] = []
        for i in range(threads):
            count = options["num_messages"] // threads
            if i < options["num_messages"] % threads:
                count += 1
            jobs.append((count, personals_pairs, options, random.randint(0, 10**10)))
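
        # For example, num_messages=10 and threads=3 yields per-thread
        # counts of 4, 3, 3: each thread gets the floor share, and the
        # first (num_messages % threads) threads get one extra message.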

        for job in jobs:
            generate_and_send_messages(job)

        if options["delete"]:
            if not options["test_suite"]:
                # These bots are not needed by the test suite. Also, we
                # don't want them interacting with each other in the dev
                # setup.
                internal_zulip_users_nosubs = [
                    ("Zulip Commit Bot", "*****@*****.**"),
                    ("Zulip Trac Bot", "*****@*****.**"),
                    ("Zulip Nagios Bot", "*****@*****.**"),
                ]
                create_users(
                    zulip_realm,
                    internal_zulip_users_nosubs,
                    bot_type=UserProfile.DEFAULT_BOT,
                    bot_owner=desdemona,
                )

            mark_all_messages_as_read()
            self.stdout.write("Successfully populated test database.\n")

        push_notifications_logger.disabled = False
Example no. 24
def ticket(request):

    step = 2 if request.FILES.getlist('attachment') else 1

    files = []
    if request.POST.get('debug') == 'on':
        debug = True
        with open(TICKET_PROGRESS, 'w') as f:
            f.write(json.dumps({'indeterminate': True, 'step': step}))
        step += 1

        mntpt, direc, dump = debug_get_settings()
        debug_generate()

        _n = notifier()
        if not _n.is_freenas() and _n.failover_licensed():
            debug_file = '%s/debug.tar' % direc
            debug_name = 'debug-%s.tar' % time.strftime('%Y%m%d%H%M%S')
        else:
            gc = GlobalConfiguration.objects.all().order_by('-id')[0]
            debug_file = dump
            debug_name = 'debug-%s-%s.txz' % (
                gc.gc_hostname.encode('utf-8'),
                time.strftime('%Y%m%d%H%M%S'),
            )

        files.append(File(open(debug_file, 'rb'), name=debug_name))
    else:
        debug = False

    with open(TICKET_PROGRESS, 'w') as f:
        f.write(json.dumps({'indeterminate': True, 'step': step}))
    step += 1

    data = {
        'title': request.POST.get('subject'),
        'body': request.POST.get('desc'),
        'version': get_sw_version().split('-', 1)[-1],
        'category': request.POST.get('category'),
        'debug': debug,
    }

    if get_sw_name().lower() == 'freenas':
        data.update({
            'user': request.POST.get('username'),
            'password': request.POST.get('password'),
            'type': request.POST.get('type'),
        })
    else:

        serial = subprocess.Popen(
            ['/usr/local/sbin/dmidecode', '-s', 'system-serial-number'],
            stdout=subprocess.PIPE).communicate()[0].split('\n')[0].upper()

        license, reason = utils.get_license()
        if license:
            company = license.customer_name
        else:
            company = 'Unknown'

        data.update({
            'phone': request.POST.get('phone'),
            'name': request.POST.get('name'),
            'company': company,
            'email': request.POST.get('email'),
            'criticality': request.POST.get('criticality'),
            'environment': request.POST.get('environment'),
            'serial': serial,
        })

    success, msg, tid = utils.new_ticket(data)

    with open(TICKET_PROGRESS, 'w') as f:
        f.write(json.dumps({'indeterminate': True, 'step': step}))
    step += 1

    data = {'message': msg, 'error': not success}

    if success:
        files.extend(request.FILES.getlist('attachment'))
        for f in files:
            success, attachmsg = utils.ticket_attach(
                {
                    'user': request.POST.get('username'),
                    'password': request.POST.get('password'),
                    'ticketnum': tid,
                }, f)

    data = ('<html><body><textarea>%s</textarea></body></html>' %
            (json.dumps(data), ))
    return HttpResponse(data)
Example no. 25
def generate_attachments_zip_export(export_type,
                                    username,
                                    id_string,
                                    export_id=None,
                                    options=None,
                                    xform=None):
    """
    Generates zip export of attachments.

    param: export_type
    param: username: logged-in username
    param: id_string: xform id_string
    param: export_id: ID of export object associated with the request
    param: options: additional parameters required for the lookup.
        ext: File extension of the generated export
    """
    export_type = options.get("extension", export_type)
    filter_query = options.get("query")

    if xform is None:
        xform = XForm.objects.get(user__username=username, id_string=id_string)

    if options.get("dataview_pk"):
        dataview = DataView.objects.get(pk=options.get("dataview_pk"))
        attachments = Attachment.objects.filter(
            instance_id__in=[
                rec.get('_id') for rec in dataview.query_data(
                    dataview, all_data=True, filter_query=filter_query)
            ],
            instance__deleted_at__isnull=True)
    else:
        instance_ids = query_data(xform, fields='["_id"]', query=filter_query)
        attachments = Attachment.objects.filter(
            instance__deleted_at__isnull=True)
        if xform.is_merged_dataset:
            attachments = attachments.filter(instance__xform_id__in=[
                i for i in xform.mergedxform.xforms.filter(
                    deleted_at__isnull=True).values_list('id', flat=True)
            ]).filter(instance_id__in=[i_id['_id'] for i_id in instance_ids])
        else:
            attachments = attachments.filter(
                instance__xform_id=xform.pk).filter(
                    instance_id__in=[i_id['_id'] for i_id in instance_ids])

    filename = "%s_%s.%s" % (id_string,
                             datetime.now().strftime("%Y_%m_%d_%H_%M_%S"),
                             export_type.lower())
    file_path = os.path.join(username, 'exports', id_string, export_type,
                             filename)
    zip_file = None
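
    # create_attachments_zipfile presumably returns a named temporary file;
    # the nested try/finally below re-opens it for storage and closes both
    # handles even if default_storage.save() raises.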

    try:
        zip_file = create_attachments_zipfile(attachments)

        try:
            temp_file = builtins.open(zip_file.name, 'rb')
            filename = default_storage.save(file_path,
                                            File(temp_file, file_path))
        finally:
            temp_file.close()
    finally:
        if zip_file:
            zip_file.close()

    export = get_or_create_export(export_id, xform, export_type, options)
    export.filedir, export.filename = os.path.split(filename)
    export.internal_status = Export.SUCCESSFUL
    export.save()

    return export
Example no. 26
def test_put_file(self, mock_ftp):
    self.storage._start_connection()
    self.storage._put_file('foo', File(io.BytesIO(b'foo'), 'foo'))
Example no. 27
def generate_export(export_type, extension, username, id_string,
                    export_id=None, filter_query=None, group_delimiter='/',
                    split_select_multiples=True,
                    binary_select_multiples=False,
                    sync_to_gsuit=False):
    """
    Create appropriate export object given the export type
    """
    export_type_func_map = {
        Export.XLS_EXPORT: 'to_xls_export',
        Export.CSV_EXPORT: 'to_flat_csv_export',
        Export.CSV_ZIP_EXPORT: 'to_zipped_csv',
        Export.SAV_ZIP_EXPORT: 'to_zipped_sav',
        Export.ANALYSER_EXPORT: 'to_analyser_export'
    }

    xform = XForm.objects.get(
        user__username__iexact=username, id_string__exact=id_string)

    # query mongo for the cursor
    records = query_mongo(username, id_string, filter_query)

    export_builder = ExportBuilder()
    export_builder.GROUP_DELIMITER = group_delimiter
    export_builder.SPLIT_SELECT_MULTIPLES = split_select_multiples
    export_builder.BINARY_SELECT_MULTIPLES = binary_select_multiples
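
    # The version lookup below appears to expect a Mongo-style filter of the
    # form {'$and': [{'fs_project_uuid': <pk>, '__version__': <version>}]}
    # (inferred from the lookups in this function; not confirmed elsewhere).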
    __version__ = "0"
    try:
        __version__ = filter_query['$and'][0]['__version__']
    except Exception as e:
        print(str(e))
    if __version__:
        survey = build_survey_from_history(xform, __version__)
        if not survey:
            export_builder.set_survey(xform.data_dictionary().survey)
        else:
            export_builder.set_survey(survey)
    else:
        export_builder.set_survey(xform.data_dictionary().survey)

    prefix = slugify('{}_export__{}__{}'.format(export_type, username, id_string))
    temp_file = NamedTemporaryFile(prefix=prefix, suffix=("." + extension))

    # get the export function by export type
    func = getattr(export_builder, export_type_func_map[export_type])

    func(temp_file.name, records, username, id_string, filter_query)

    # generate filename
    basename = "%s_%s" % (
        id_string, datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
    if export_type == Export.ANALYSER_EXPORT:
        # Analyser exports should be distinguished by more than just their
        # file extension.
        basename = '{}_ANALYSER_{}'.format(
            id_string, datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
    filename = basename + "." + extension

    # check filename is unique
    while not Export.is_filename_unique(xform, filename):
        filename = increment_index_in_filename(filename)
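    # e.g. if the generated name is already taken, increment_index_in_filename
    # presumably appends or bumps a numeric suffix until the name is unique.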

    file_path = os.path.join(
        username,
        'exports',
        id_string,
        export_type,
        filename)

    # TODO: if S3 storage, make private - how will we protect local storage?
    storage = get_storage_class()()

    print(sync_to_gsuit, 'file_url--------->', temp_file, filter_query)

    try:
        if sync_to_gsuit and '__version__' not in filter_query['$and'][0]:
            if not os.path.exists("media/forms/"):
                os.makedirs("media/forms/")

            temporarylocation = "media/forms/submissions_{}.xls".format(id_string)
            import shutil
            shutil.copy(temp_file.name, temporarylocation)
            fxf_form = FieldSightXF.objects.get(pk=filter_query['$and'][0]['fs_project_uuid'])
            if fxf_form.schedule:
                name = fxf_form.schedule.name
            elif fxf_form.stage:
                name = fxf_form.stage.name
            else:
                name = fxf_form.xf.title
            upload_to_drive(
                temporarylocation,
                name + '_' + id_string,
                str(fxf_form.id) + '_' + name + '_' + id_string,
                fxf_form.project,
            )
            os.remove(temporarylocation)
    except Exception as e:
        print(e.__dict__)
    # Seek to the beginning, as required by storage classes.
    temp_file.seek(0)
    export_filename = storage.save(
        file_path,
        File(temp_file, file_path))
    
    dir_name, basename = os.path.split(export_filename)
    temp_file.close()
    # Get or create the export object.
    if export_id:
        export = Export.objects.get(id=export_id)
    else:
        fsxf = list(filter_query.values())[0]
        export = Export(xform=xform, export_type=export_type, fsxf_id=fsxf)
    export.filedir = dir_name
    export.filename = basename
    export.internal_status = Export.SUCCESSFUL
    # Note: exports are persisted here regardless of whether a filter was
    # applied.
    export.save()
    return export
Example no. 28
def test_put_file_error(self, mock_ftp):
    self.storage._start_connection()
    with self.assertRaises(ftp.FTPStorageException):
        self.storage._put_file('foo', File(io.BytesIO(b'foo'), 'foo'))
Example no. 29
def test_noname_file_default_name(self):
    self.assertEqual(File(BytesIO(b'A file with no name')).name, None)
Example no. 30
def ticket(request):

    step = 2 if request.FILES.getlist('attachment') else 1

    files = []
    if request.POST.get('debug') == 'on':
        debug = True
        with open(TICKET_PROGRESS, 'w') as f:
            f.write(json.dumps({'indeterminate': True, 'step': step}))
        step += 1

        mntpt, direc, dump = debug_get_settings()
        debug_run(direc)
        files.append(File(open(dump, 'rb'), name=os.path.basename(dump)))
    else:
        debug = False

    with open(TICKET_PROGRESS, 'w') as f:
        f.write(json.dumps({'indeterminate': True, 'step': step}))
    step += 1

    data = {
        'title': request.POST.get('subject'),
        'body': request.POST.get('desc'),
        'version': get_sw_version().split('-', 1)[-1],
        'category': request.POST.get('category'),
        'debug': debug,
    }

    if get_sw_name().lower() == 'freenas':
        data.update({
            'user': request.POST.get('username'),
            'password': request.POST.get('password'),
            'type': request.POST.get('type'),
        })
    else:

        serial = subprocess.Popen(
            ['/usr/local/sbin/dmidecode', '-s', 'system-serial-number'],
            stdout=subprocess.PIPE).communicate()[0].split('\n')[0].upper()

        license, reason = utils.get_license()
        if license:
            company = license.customer_name
        else:
            company = 'Unknown'

        data.update({
            'phone': request.POST.get('phone'),
            'name': request.POST.get('name'),
            'company': company,
            'email': request.POST.get('email'),
            'criticality': request.POST.get('criticality'),
            'environment': request.POST.get('environment'),
            'serial': serial,
        })

    success, msg, tid = utils.new_ticket(data)

    with open(TICKET_PROGRESS, 'w') as f:
        f.write(json.dumps({'indeterminate': True, 'step': step}))
    step += 1

    data = {'message': msg, 'error': not success}

    if success:
        files.extend(request.FILES.getlist('attachment'))
        for f in files:
            success, attachmsg = utils.ticket_attach(
                {
                    'user': request.POST.get('username'),
                    'password': request.POST.get('password'),
                    'ticketnum': tid,
                }, f)

    data = ('<html><body><textarea>%s</textarea></body></html>' %
            (json.dumps(data), ))
    return HttpResponse(data)