Example #1
class Photo(models.Model):
    image_path = models.FilePathField(max_length=512, db_index=True)
    image_hash = models.CharField(primary_key=True, max_length=32, null=False)

    thumbnail = models.ImageField(upload_to='thumbnails')
    thumbnail_tiny = models.ImageField(upload_to='thumbnails_tiny')
    thumbnail_small = models.ImageField(upload_to='thumbnails_small')
    thumbnail_big = models.ImageField(upload_to='thumbnails_big')

    square_thumbnail = models.ImageField(upload_to='square_thumbnails')
    square_thumbnail_tiny = models.ImageField(
        upload_to='square_thumbnails_tiny')
    square_thumbnail_small = models.ImageField(
        upload_to='square_thumbnails_small')
    square_thumbnail_big = models.ImageField(upload_to='square_thumbnails_big')

    image = models.ImageField(upload_to='photos')

    added_on = models.DateTimeField(null=False, blank=False, db_index=True)

    exif_gps_lat = models.FloatField(blank=True, null=True)
    exif_gps_lon = models.FloatField(blank=True, null=True)
    exif_timestamp = models.DateTimeField(blank=True, null=True, db_index=True)

    exif_json = JSONField(blank=True, null=True)

    geolocation_json = JSONField(blank=True, null=True, db_index=True)
    captions_json = JSONField(blank=True, null=True, db_index=True)

    search_captions = models.TextField(blank=True, null=True, db_index=True)
    search_location = models.TextField(blank=True, null=True, db_index=True)

    favorited = models.BooleanField(default=False, db_index=True)

    def _generate_md5(self):
        hash_md5 = hashlib.md5()
        with open(self.image_path, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        self.image_hash = hash_md5.hexdigest()

    def _generate_captions(self):
        image_path = self.image_path
        captions = {}
        try:
            with open(image_path, "rb") as image_file:
                encoded_string = base64.b64encode(image_file.read())
            encoded_string = encoded_string.decode('utf-8')  # base64 output is ASCII
            resp_captions = requests.post('http://localhost:5000/',
                                          data=encoded_string)
            captions['densecap'] = resp_captions.json()['data'][:10]
            self.search_captions = ' , '.join(
                resp_captions.json()['data'][:10])
            self.save()
        except Exception:
            util.logger.warning(
                'could not generate densecap captions for image %s' %
                image_path)

        try:
            res_places365 = inference_places365(image_path)
            captions['places365'] = res_places365
            self.captions_json = captions
            if self.search_captions:
                self.search_captions = self.search_captions + ' , ' + \
                    ' , '.join(res_places365['attributes'] + res_places365['categories'] + [res_places365['environment']])
            else:
                self.search_captions = ' , '.join(
                    res_places365['attributes'] + res_places365['categories'] +
                    [res_places365['environment']])

            self.save()
        except Exception:
            util.logger.warning(
                'could not generate places365 captions for image %s' %
                image_path)

    def _generate_thumbnail(self):
        image = PIL.Image.open(self.image_path)

        # If no ExifTags, no rotating needed.
        try:
            # Grab orientation value.
            image_exif = image._getexif()
            image_orientation = image_exif[274]

            # Rotate depending on orientation.
            if image_orientation == 2:
                image = image.transpose(PIL.Image.FLIP_LEFT_RIGHT)
            if image_orientation == 3:
                image = image.transpose(PIL.Image.ROTATE_180)
            if image_orientation == 4:
                image = image.transpose(PIL.Image.FLIP_TOP_BOTTOM)
            if image_orientation == 5:
                image = image.transpose(PIL.Image.FLIP_LEFT_RIGHT).transpose(
                    PIL.Image.ROTATE_90)
            if image_orientation == 6:
                image = image.transpose(PIL.Image.ROTATE_270)
            if image_orientation == 7:
                image = image.transpose(PIL.Image.FLIP_TOP_BOTTOM).transpose(
                    PIL.Image.ROTATE_90)
            if image_orientation == 8:
                image = image.transpose(PIL.Image.ROTATE_90)
        except Exception:
            pass

        # Make thumbnails at each configured size.
        # Note: PIL.Image.ANTIALIAS was removed in Pillow 10; use PIL.Image.LANCZOS on newer versions.
        image.thumbnail(ownphotos.settings.THUMBNAIL_SIZE_BIG,
                        PIL.Image.ANTIALIAS)
        image_io_thumb = BytesIO()
        image.save(image_io_thumb, format="JPEG")
        self.thumbnail_big.save(self.image_hash + '.jpg',
                                ContentFile(image_io_thumb.getvalue()))
        image_io_thumb.close()

        square_thumb = ImageOps.fit(image,
                                    ownphotos.settings.THUMBNAIL_SIZE_BIG,
                                    PIL.Image.ANTIALIAS)
        image_io_square_thumb = BytesIO()
        square_thumb.save(image_io_square_thumb, format="JPEG")
        self.square_thumbnail_big.save(
            self.image_hash + '.jpg',
            ContentFile(image_io_square_thumb.getvalue()))
        image_io_square_thumb.close()

        image.thumbnail(ownphotos.settings.THUMBNAIL_SIZE_MEDIUM,
                        PIL.Image.ANTIALIAS)
        image_io_thumb = BytesIO()
        image.save(image_io_thumb, format="JPEG")
        self.thumbnail.save(self.image_hash + '.jpg',
                            ContentFile(image_io_thumb.getvalue()))
        image_io_thumb.close()

        square_thumb = ImageOps.fit(image,
                                    ownphotos.settings.THUMBNAIL_SIZE_MEDIUM,
                                    PIL.Image.ANTIALIAS)
        image_io_square_thumb = BytesIO()
        square_thumb.save(image_io_square_thumb, format="JPEG")
        self.square_thumbnail.save(
            self.image_hash + '.jpg',
            ContentFile(image_io_square_thumb.getvalue()))
        image_io_square_thumb.close()

        image.thumbnail(ownphotos.settings.THUMBNAIL_SIZE_SMALL,
                        PIL.Image.ANTIALIAS)
        image_io_thumb = BytesIO()
        image.save(image_io_thumb, format="JPEG")
        self.thumbnail_small.save(self.image_hash + '.jpg',
                                  ContentFile(image_io_thumb.getvalue()))
        image_io_thumb.close()

        square_thumb = ImageOps.fit(image,
                                    ownphotos.settings.THUMBNAIL_SIZE_SMALL,
                                    PIL.Image.ANTIALIAS)
        image_io_square_thumb = BytesIO()
        square_thumb.save(image_io_square_thumb, format="JPEG")
        self.square_thumbnail_small.save(
            self.image_hash + '.jpg',
            ContentFile(image_io_square_thumb.getvalue()))
        image_io_square_thumb.close()

        image.thumbnail(ownphotos.settings.THUMBNAIL_SIZE_TINY,
                        PIL.Image.ANTIALIAS)
        image_io_thumb = BytesIO()
        image.save(image_io_thumb, format="JPEG")
        self.thumbnail_tiny.save(self.image_hash + '.jpg',
                                 ContentFile(image_io_thumb.getvalue()))
        image_io_thumb.close()

        square_thumb = ImageOps.fit(image,
                                    ownphotos.settings.THUMBNAIL_SIZE_TINY,
                                    PIL.Image.ANTIALIAS)
        image_io_square_thumb = BytesIO()
        square_thumb.save(image_io_square_thumb, format="JPEG")
        self.square_thumbnail_tiny.save(
            self.image_hash + '.jpg',
            ContentFile(image_io_square_thumb.getvalue()))
        image_io_square_thumb.close()

    def _save_image_to_db(self):
        image = PIL.Image.open(self.image_path)

        try:
            # Grab orientation value.
            image_exif = image._getexif()
            image_orientation = image_exif[274]

            # Rotate depending on orientation.
            if image_orientation == 2:
                image = image.transpose(PIL.Image.FLIP_LEFT_RIGHT)
            if image_orientation == 3:
                image = image.transpose(PIL.Image.ROTATE_180)
            if image_orientation == 4:
                image = image.transpose(PIL.Image.FLIP_TOP_BOTTOM)
            if image_orientation == 5:
                image = image.transpose(PIL.Image.FLIP_LEFT_RIGHT).transpose(
                    PIL.Image.ROTATE_90)
            if image_orientation == 6:
                image = image.transpose(PIL.Image.ROTATE_270)
            if image_orientation == 7:
                image = image.transpose(PIL.Image.FLIP_TOP_BOTTOM).transpose(
                    PIL.Image.ROTATE_90)
            if image_orientation == 8:
                image = image.transpose(PIL.Image.ROTATE_90)
        except Exception:
            pass

        # image.thumbnail(ownphotos.settings.FULLPHOTO_SIZE, PIL.Image.ANTIALIAS)
        image_io = BytesIO()
        image.save(image_io, format="JPEG")
        self.image.save(self.image_hash + '.jpg',
                        ContentFile(image_io.getvalue()))
        image_io.close()

    def _extract_exif(self):
        ret = {}
        # ipdb.set_trace()
        i = PIL.Image.open(self.image_path)
        info = i._getexif()
        date_format = "%Y:%m:%d %H:%M:%S"
        if info:
            for tag, value in info.items():
                decoded = EXIFTAGS.get(tag, tag)
                ret[decoded] = value

            with open(self.image_path, 'rb') as fimg:
                exif = exifread.process_file(fimg, details=False)

                serializable = dict([key, value.printable]
                                    for key, value in exif.items())
                self.exif_json = serializable
                # ipdb.set_trace()
                if 'EXIF DateTimeOriginal' in exif.keys():
                    tst_str = exif['EXIF DateTimeOriginal'].values
                    try:
                        tst_dt = datetime.strptime(tst_str, date_format)
                    except Exception:
                        tst_dt = None
                    # ipdb.set_trace()
                    self.exif_timestamp = tst_dt
                else:
                    self.exif_timestamp = None

                if 'GPS GPSLongitude' in exif.keys():
                    self.exif_gps_lon = util.convert_to_degrees(
                        exif['GPS GPSLongitude'].values)
                    # Check for correct positive/negative degrees
                    if exif['GPS GPSLongitudeRef'].values != 'E':
                        self.exif_gps_lon = -self.exif_gps_lon
                else:
                    self.exif_gps_lon = None

                if 'GPS GPSLatitude' in exif.keys():
                    self.exif_gps_lat = util.convert_to_degrees(
                        exif['GPS GPSLatitude'].values)
                    # Check for correct positive/negative degrees
                    if exif['GPS GPSLatitudeRef'].values != 'N':
                        self.exif_gps_lat = -self.exif_gps_lat
                else:
                    self.exif_gps_lat = None

        if not self.exif_timestamp:
            try:
                basename_without_extension = os.path.splitext(
                    os.path.basename(self.image_path))[0]
                self.exif_timestamp = dateparser.parse(
                    basename_without_extension, ignoretz=True, fuzzy=True)
            except BaseException:
                util.logger.warning(
                    "Failed to determine date from filename for image %s" %
                    self.image_path)

    def _geolocate(self):
        if not (self.exif_gps_lat and self.exif_gps_lon):
            self._extract_exif()
        if (self.exif_gps_lat and self.exif_gps_lon):
            try:
                location = geolocator.reverse(
                    "%f,%f" % (self.exif_gps_lat, self.exif_gps_lon))
                location = location.raw
                self.geolocation_json = location
                self.save()
            except Exception:
                pass
                # self.geolocation_json = {}

    def _geolocate_mapbox(self):
        if not (self.exif_gps_lat and self.exif_gps_lon):
            self._extract_exif()
        if (self.exif_gps_lat and self.exif_gps_lon):
            try:
                res = util.mapbox_reverse_geocode(self.exif_gps_lat,
                                                  self.exif_gps_lon)
                self.geolocation_json = res
                if 'search_text' in res.keys():
                    if self.search_location:
                        self.search_location = self.search_location + ' ' + res[
                            'search_text']
                    else:
                        self.search_location = res['search_text']
                self.save()
            except Exception:
                util.logger.warning('something went wrong with geolocating')
                pass
                # self.geolocation_json = {}

    def _extract_faces(self):
        qs_unknown_person = Person.objects.filter(name='unknown')
        if qs_unknown_person.count() == 0:
            unknown_person = Person(name='unknown')
            unknown_person.save()
        else:
            unknown_person = qs_unknown_person[0]

        image = PIL.Image.open(self.thumbnail)
        image = np.array(image.convert('RGB'))

        face_encodings = face_recognition.face_encodings(image)
        face_locations = face_recognition.face_locations(image)

        faces = []
        if len(face_locations) > 0:
            for idx_face, face in enumerate(zip(face_encodings,
                                                face_locations)):
                face_encoding = face[0]
                face_location = face[1]
                top, right, bottom, left = face_location
                face_image = image[top:bottom, left:right]
                face_image = PIL.Image.fromarray(face_image)

                face = Face()
                face.image_path = self.image_hash + "_" + str(
                    idx_face) + '.jpg'
                face.person = unknown_person
                face.photo = self
                face.location_top = face_location[0]
                face.location_right = face_location[1]
                face.location_bottom = face_location[2]
                face.location_left = face_location[3]
                face.encoding = face_encoding.tobytes().hex()
                #                 face.encoding = face_encoding.dumps()

                face_io = BytesIO()
                face_image.save(face_io, format="JPEG")
                face.image.save(face.image_path,
                                ContentFile(face_io.getvalue()))
                face_io.close()
                face.save()

    def _add_to_album_thing(self):
        if self.search_captions:
            doc = util.nlp('. '.join(self.search_captions.split(' , ')))
            nouns = list(set([t.lemma_ for t in doc if t.tag_ == "NN"]))
            for noun in nouns:
                album_thing = get_album_thing(title=noun)[0]
                album_thing.photos.add(self)
                album_thing.save()

    def _add_to_album_date(self):
        if self.exif_timestamp:
            album_date = get_album_date(date=self.exif_timestamp.date())[0]
            album_date.photos.add(self)
            album_date.save()
        else:
            album_date = get_album_date(date=None)[0]
            album_date.photos.add(self)
            album_date.save()

    def _add_to_album_place(self):
        if self.geolocation_json and len(self.geolocation_json) > 0:
            if 'features' in self.geolocation_json.keys():
                for feature in self.geolocation_json['features']:
                    if 'text' in feature.keys():
                        album_place = get_album_place(feature['text'])[0]
                        album_place.photos.add(self)
                        album_place.save()

    def __str__(self):
        return "%s" % self.image_hash
Example #2
class Migration(migrations.Migration):

    dependencies = [
        ('soc', '0002_auto_20180619_1607'),
        ('soc_system', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='AdvancedMenu',
            fields=[
                ('id',
                 models.AutoField(verbose_name='ID',
                                  serialize=False,
                                  auto_created=True,
                                  primary_key=True)),
                ('type', models.CharField(default=b'default', max_length=10)),
                ('name', models.CharField(max_length=24)),
                ('sort', models.PositiveSmallIntegerField(default=50)),
                ('is_landing', models.PositiveSmallIntegerField(default=0)),
                ('enable', models.PositiveSmallIntegerField(default=1)),
                ('agent', models.ForeignKey(to='soc.Agent')),
            ],
            options={
                'db_table': 'advanced_menu',
                'verbose_name': '\u81ea\u5b9a\u4e49\u83dc\u5355',
            },
        ),
        migrations.CreateModel(
            name='BlackIPList',
            fields=[
                ('id',
                 models.AutoField(verbose_name='ID',
                                  serialize=False,
                                  auto_created=True,
                                  primary_key=True)),
                ('is_black',
                 models.IntegerField(
                     default=1,
                     choices=[(1, b'\xe9\xbb\x91\xe5\x90\x8d\xe5\x8d\x95'),
                              (2, b'\xe7\x99\xbd\xe5\x90\x8d\xe5\x8d\x95')])),
                ('start_time', models.DateTimeField(null=True, blank=True)),
                ('ip', models.CharField(max_length=64)),
                ('type', models.IntegerField(default=0)),
                ('agent', models.ForeignKey(to='soc.Agent')),
            ],
            options={
                'db_table': 'black_ip_list',
            },
        ),
        migrations.CreateModel(
            name='DefaultMenu',
            fields=[
                ('id',
                 models.AutoField(verbose_name='ID',
                                  serialize=False,
                                  auto_created=True,
                                  primary_key=True)),
                ('index', models.PositiveIntegerField()),
                ('sort', models.PositiveSmallIntegerField(default=50)),
                ('level', models.CharField(max_length=6)),
                ('name', models.CharField(max_length=32)),
                ('topic', models.CharField(max_length=32)),
                ('is_landing', models.PositiveSmallIntegerField(default=0)),
                ('enable', models.PositiveSmallIntegerField(default=1)),
                ('company_show', models.IntegerField(default=1)),
                ('parent',
                 models.ForeignKey(to='soc_system.DefaultMenu', null=True)),
            ],
            options={
                'db_table': 'default_menu',
                'verbose_name': '\u9ed8\u8ba4\u83dc\u5355',
            },
        ),
        migrations.CreateModel(
            name='Message',
            fields=[
                ('id',
                 models.AutoField(verbose_name='ID',
                                  serialize=False,
                                  auto_created=True,
                                  primary_key=True)),
                ('type',
                 models.IntegerField(
                     choices=[(1, b'SMTP\xe9\x82\xae\xe4\xbb\xb6'
                               ), (2, b'\xe7\x9f\xad\xe4\xbf\xa1'
                                   ), (4, b'cloud\xe9\x82\xae\xe4\xbb\xb6')])),
                ('smtp_server', models.CharField(max_length=128, blank=True)),
                ('send_sender', models.CharField(max_length=64, blank=True)),
                ('user', models.CharField(max_length=64, blank=True)),
                ('password', models.CharField(max_length=64, blank=True)),
                ('tls_or_ssl', models.BooleanField(default=False)),
                ('api', models.CharField(max_length=256, blank=True)),
                ('agent', models.ForeignKey(to='soc.Agent')),
            ],
            options={
                'db_table': 'system_message',
                'verbose_name': '\u6d88\u606f\u4e2d\u5fc3\u8bbe\u7f6e',
            },
        ),
        migrations.CreateModel(
            name='Node',
            fields=[
                ('id',
                 models.AutoField(verbose_name='ID',
                                  serialize=False,
                                  auto_created=True,
                                  primary_key=True)),
                ('name', models.CharField(max_length=128)),
                ('uuid',
                 models.CharField(default=uuid.uuid4,
                                  max_length=200,
                                  null=True,
                                  blank=True)),
                ('ip', models.CharField(default=b'127.0.0.1', max_length=200)),
                ('port', models.IntegerField(default=80)),
                ('role', models.CharField(max_length=10)),
                ('type', models.CharField(max_length=15)),
                ('auth_key', models.TextField(null=True)),
                ('status', models.PositiveSmallIntegerField(default=0)),
                ('api_url',
                 models.CharField(max_length=256, null=True, blank=True)),
                ('secret_id', models.CharField(max_length=64, null=True)),
                ('secret_key', models.CharField(max_length=128, null=True)),
                ('accept_parent_connection',
                 models.PositiveSmallIntegerField(default=0)),
                ('accept_next_settings', models.TextField(default=b'{}')),
                ('accept_apply_message',
                 models.PositiveSmallIntegerField(default=0)),
                ('accept_apply_loophole',
                 models.PositiveSmallIntegerField(default=0)),
                ('accept_apply_policy',
                 models.PositiveSmallIntegerField(default=0)),
                ('accept_apply_event_db',
                 models.PositiveSmallIntegerField(default=0)),
                ('accept_apply_engine',
                 models.PositiveSmallIntegerField(default=0)),
                ('accept_apply_center',
                 models.PositiveSmallIntegerField(default=0)),
                ('notify_when_lose_children',
                 models.PositiveSmallIntegerField(default=1)),
                ('last_heartbeat',
                 models.DateTimeField(auto_now_add=True, null=True)),
                ('next_check',
                 models.DateTimeField(auto_now_add=True, null=True)),
                ('version',
                 models.CharField(default=b'', max_length=10, null=True)),
                ('info',
                 models.CharField(default=b'',
                                  max_length=256,
                                  null=True,
                                  blank=True)),
                ('agent', models.ForeignKey(to='soc.Agent')),
            ],
            options={
                'db_table': 'system_node',
                'verbose_name': '\u7ea7\u8054\u8282\u70b9',
            },
        ),
        migrations.CreateModel(
            name='SetPay',
            fields=[
                ('id',
                 models.AutoField(verbose_name='ID',
                                  serialize=False,
                                  auto_created=True,
                                  primary_key=True)),
                ('bank', models.CharField(max_length=32, blank=True)),
                ('username', models.CharField(max_length=32, blank=True)),
                ('bank_user', models.CharField(max_length=64, blank=True)),
                ('email', models.CharField(max_length=32, blank=True)),
                ('pay_online', models.BooleanField(default=True)),
                ('pay_outline', models.BooleanField(default=True)),
                ('invoice', models.BooleanField(default=True)),
                ('agent', models.ForeignKey(to='soc.Agent')),
            ],
            options={
                'db_table': 'system_setpay',
                'verbose_name': '\u8d22\u52a1\u8bbe\u7f6e',
            },
        ),
        migrations.CreateModel(
            name='SystemUpgradeFile',
            fields=[
                ('id',
                 models.AutoField(verbose_name='ID',
                                  serialize=False,
                                  auto_created=True,
                                  primary_key=True)),
                ('name', models.CharField(max_length=256)),
                ('u_type', models.CharField(max_length=15)),
                ('upgrade_type', models.CharField(max_length=15)),
                ('version', models.CharField(max_length=15)),
                ('path', models.FilePathField(null=True)),
                ('build_date', models.DateTimeField(null=True, blank=True)),
                ('agent', models.ForeignKey(to='soc.Agent')),
            ],
            options={
                'db_table': 'system_upgrade_file',
                'verbose_name': '\u5347\u7ea7\u6587\u4ef6',
            },
        ),
        migrations.CreateModel(
            name='SystemUpgradeTask',
            fields=[
                ('id',
                 models.AutoField(verbose_name='ID',
                                  serialize=False,
                                  auto_created=True,
                                  primary_key=True)),
                ('u_type', models.CharField(max_length=15)),
                ('target_id', models.IntegerField(null=True)),
                ('target_name', models.CharField(max_length=256)),
                ('target_uuid', models.CharField(max_length=256)),
                ('target_version', models.CharField(max_length=15)),
                ('target_parent_id', models.IntegerField(null=True)),
                ('file_uuid', models.CharField(max_length=256)),
                ('file_version', models.CharField(max_length=15)),
                ('status', models.PositiveSmallIntegerField(default=0)),
                ('percent', models.PositiveSmallIntegerField(default=0)),
                ('agent', models.ForeignKey(to='soc.Agent')),
            ],
            options={
                'db_table': 'system_upgrade_task',
                'verbose_name': '\u5347\u7ea7\u4efb\u52a1',
            },
        ),
        migrations.AddField(
            model_name='advancedmenu',
            name='default_menu',
            field=models.ForeignKey(to='soc_system.DefaultMenu', null=True),
        ),
    ]
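
These operations were generated under Python 2 and Django 1.x: the bytes literals (b'default', b'127.0.0.1', the encoded Chinese choice labels) are Python 2 str defaults, and ForeignKey is declared without on_delete, which Django 2.0 made mandatory. Below is a hedged sketch of the AdvancedMenu fields in current Django style; the field subset and the CASCADE choice are assumptions, not taken from the original migration.

from django.db import models

class AdvancedMenu(models.Model):
    # Django 2.0+ style: plain str defaults and an explicit on_delete.
    type = models.CharField(default='default', max_length=10)
    agent = models.ForeignKey('soc.Agent', on_delete=models.CASCADE)  # CASCADE is an assumption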
Example #3
class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('people', '0002_auto_20210204_1051'),
        ('music', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Album',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=255)),
                ('prefix', models.CharField(blank=True, max_length=20)),
                ('subtitle', models.CharField(blank=True, max_length=255)),
                ('slug', models.SlugField()),
                ('asin', models.CharField(blank=True, max_length=14)),
                ('release_date', models.DateField(blank=True, null=True)),
                ('cover', models.FileField(blank=True, upload_to='albums')),
                ('review', models.TextField(blank=True)),
                ('is_ep', models.BooleanField(default=False)),
                ('is_compilation', models.BooleanField(default=False)),
            ],
            options={
                'db_table': 'music_albums',
                'ordering': ('title',),
            },
        ),
        migrations.CreateModel(
            name='Band',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('prefix', models.CharField(blank=True, max_length=20)),
                ('slug', models.SlugField(unique=True)),
                ('website', models.URLField(blank=True)),
                ('musicians', models.ManyToManyField(blank=True, limit_choices_to={'person_types__slug__exact': 'musician'}, to='people.Person')),
            ],
            options={
                'db_table': 'music_bands',
                'ordering': ('slug', 'title'),
            },
        ),
        migrations.CreateModel(
            name='Genre',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('slug', models.SlugField(unique=True)),
            ],
            options={
                'db_table': 'music_genres',
                'ordering': ('title',),
            },
        ),
        migrations.CreateModel(
            name='Label',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('prefix', models.CharField(blank=True, max_length=20)),
                ('slug', models.SlugField(unique=True)),
                ('website', models.URLField(blank=True)),
            ],
            options={
                'db_table': 'music_labels',
                'ordering': ('title',),
            },
        ),
        migrations.CreateModel(
            name='Track',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=255)),
                ('slug', models.SlugField()),
                ('mp3', models.FilePathField(match='.*\\.mp3$', path='/Users/tak/proj/django-site-mac/sitetracks')),
                ('number', models.IntegerField(default=0)),
                ('album', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tracks', to='music.Album')),
                ('band', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tracks', to='music.Band')),
            ],
            options={
                'db_table': 'music_tracks',
                'ordering': ('album', 'number', 'title', 'mp3'),
            },
        ),
        migrations.AddField(
            model_name='album',
            name='band',
            field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to='music.Band'),
        ),
        migrations.AddField(
            model_name='album',
            name='genre',
            field=models.ManyToManyField(blank=True, to='music.Genre'),
        ),
        migrations.AddField(
            model_name='album',
            name='label',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='music.Label'),
        ),
    ]
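
Note that the Track.mp3 field bakes an absolute developer path (/Users/tak/proj/django-site-mac/sitetracks) into the migration, so its choices only resolve on the original author's machine. A hedged sketch of a more portable declaration follows; using MEDIA_ROOT as the track directory is an assumption about the deployment, not part of the original.

from django.conf import settings
from django.db import models

class Track(models.Model):
    # Only the mp3 field is shown; the storage root is deployment-specific.
    mp3 = models.FilePathField(match=r'.*\.mp3$', path=settings.MEDIA_ROOT)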
Example #4
class Job(models.Model):
    AUTHORIZED, UNDER_REVIEW = 1, 2
    AUTH_CHOICES = [(AUTHORIZED, "Authorized"),
                    (UNDER_REVIEW, "Authorization Required")]

    QUEUED, RUNNING, COMPLETED, ERROR, SPOOLED, PAUSED = range(1, 7)

    STATE_CHOICES = [(QUEUED, "Queued"), (RUNNING, "Running"),
                     (PAUSED, "Paused"), (SPOOLED, "Spooled"),
                     (COMPLETED, "Completed"), (ERROR, "Error")]

    state = models.IntegerField(default=QUEUED, choices=STATE_CHOICES)

    deleted = models.BooleanField(default=False)
    name = models.CharField(max_length=MAX_NAME_LEN, default="New results")
    image = models.ImageField(default=None,
                              blank=True,
                              upload_to=image_path,
                              max_length=MAX_FIELD_LEN)

    # The user that edited the object most recently.
    lastedit_user = models.ForeignKey(User,
                                      related_name='job_editor',
                                      null=True,
                                      on_delete=models.CASCADE)
    lastedit_date = models.DateTimeField(default=timezone.now)

    owner = models.ForeignKey(User, on_delete=models.CASCADE)
    text = models.TextField(default='Result description.',
                            max_length=MAX_TEXT_LEN)
    html = models.TextField(default='html')

    # Job creation date
    date = models.DateTimeField(auto_now_add=True)

    # Job runtime date.
    start_date = models.DateTimeField(null=True, blank=True)
    end_date = models.DateTimeField(null=True, blank=True)

    analysis = models.ForeignKey(Analysis, on_delete=models.CASCADE)
    project = models.ForeignKey(Project, on_delete=models.CASCADE)
    json_text = models.TextField(default="commands")

    uid = models.CharField(max_length=32)

    template = models.TextField(default="makefile")

    # Set the security level.
    security = models.IntegerField(default=UNDER_REVIEW, choices=AUTH_CHOICES)

    # This will be set when the job attempts to run.
    script = models.TextField(default="")

    # Keeps track of errors.
    stdout_log = models.TextField(default="", max_length=MAX_LOG_LEN)

    # Standard error.
    stderr_log = models.TextField(default="", max_length=MAX_LOG_LEN)

    # Will be false if the objects is to be deleted.
    valid = models.BooleanField(default=True)

    path = models.FilePathField(default="")

    objects = Manager()

    def is_running(self):
        return self.state == Job.RUNNING

    def is_success(self):
        return self.state == Job.COMPLETED

    def is_error(self):
        return self.state == Job.ERROR

    def is_started(self):
        """
        This job has been initiated.
        """
        return self.state in [Job.QUEUED, Job.SPOOLED, Job.RUNNING]

    def is_finished(self):
        """
        This job is finished.
        """
        return self.state in [Job.ERROR, Job.COMPLETED]

    def __str__(self):
        return self.name

    def get_url(self, path=''):
        """
        Return the url to the job directory
        """
        return f"jobs/{self.uid}/" + path

    def url(self):
        return reverse("job_view", kwargs=dict(uid=self.uid))

    def get_project_dir(self):
        return self.project.get_project_dir()

    def get_data_dir(self):
        # TODO: MIGRATION FIX - needs refactoring
        path = join(settings.MEDIA_ROOT, "jobs", self.uid)
        return path

    @property
    def json_data(self):
        "Returns the json_text as parsed json_data"
        return hjson.loads(self.json_text)

    def elapsed(self):
        if not (self.start_date and self.end_date):
            value = ''
        else:
            seconds = int((self.end_date - self.start_date).seconds)
            if seconds < 60:
                value = f'{seconds} seconds'
            elif seconds < 3600:
                minutes = int(seconds / 60)
                value = f'{minutes} minutes'
            else:
                hours = round(seconds / 3600, 1)
                value = f'{hours} hours'

        return value

    def done(self):
        return self.state == Job.COMPLETED

    def make_path(self):
        path = join(settings.MEDIA_ROOT, "jobs", f"{self.uid}")
        return path

    def save(self, *args, **kwargs):
        now = timezone.now()
        self.name = self.name or f"Results for: {self.analysis.name}"
        self.date = self.date or now
        self.text = self.text or self.analysis.text
        self.html = make_html(self.text, user=self.lastedit_user)
        self.name = self.name[:MAX_NAME_LEN]
        self.uid = self.uid or util.get_uuid(8)
        self.template = self.analysis.template
        self.stderr_log = self.stderr_log[:MAX_LOG_LEN]
        self.stdout_log = self.stdout_log[:MAX_LOG_LEN]
        self.name = self.name or self.analysis.name
        self.path = self.make_path()

        self.lastedit_user = self.lastedit_user or self.owner or self.project.owner
        self.lastedit_date = self.lastedit_date or now

        if not os.path.isdir(self.path):
            os.makedirs(self.path)
        self.project.set_counts(save=True)

        super(Job, self).save(*args, **kwargs)

    @property
    def summary(self):
        """
        Creates informative job summary that shows job parameters.
        """
        summary_template = "widgets/job_summary.html"
        context = dict(data=self.json_data)
        template = loader.get_template(summary_template)
        result = template.render(context)

        return result

    def runnable(self):
        """
        Job is authorized to run
        """
        authorized = self.analysis.runnable(
        ) and self.security == self.AUTHORIZED
        return authorized

    def get_name(self):
        if self.deleted:
            return f'Deleted: {self.name}'

        return self.name
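
A minimal usage sketch for the state helpers above; the uid value is a hypothetical placeholder and assumes a Job row already exists.

job = Job.objects.get(uid="abc123")   # placeholder uid
if job.is_running():
    print(f"{job.get_name()} is still running")
elif job.is_finished():
    print(f"{job.get_name()} finished in {job.elapsed() or 'an unknown time'}")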
Example #5
class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('system', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Cluster',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.TextField(default='VultureOS')),
            ],
        ),
        migrations.CreateModel(
            name='Feed',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('filename',
                 models.FilePathField(path='/var/db/darwin/', unique=True)),
                ('label', models.TextField()),
                ('description', models.TextField()),
                ('last_update', models.DateTimeField()),
                ('nb_netset', models.IntegerField()),
                ('nb_unique', models.IntegerField()),
                ('type', models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name='Monitor',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('date', models.DateTimeField()),
                ('node',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to='system.Node')),
            ],
        ),
        migrations.CreateModel(
            name='RSS',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('title', models.TextField(unique=True)),
                ('date', models.DateTimeField()),
                ('level', models.TextField()),
                ('content', models.TextField()),
                ('ack', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='ServiceStatus',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.TextField()),
                ('status', models.TextField()),
            ],
        ),
        migrations.AddField(
            model_name='monitor',
            name='services',
            field=djongo.models.fields.ArrayReferenceField(
                default=[],
                on_delete=djongo.models.fields.ArrayReferenceField._on_delete,
                to='gui.ServiceStatus'),
        ),
    ]
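
A hedged sketch of applying this migration programmatically; the app label 'gui' is inferred from the 'gui.ServiceStatus' reference above and may differ in the actual project.

from django.core.management import call_command

call_command("migrate", "gui", "0001")  # assumed app label and migration name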
Example #6
class Project(models.Model):
    PUBLIC, SHAREABLE, PRIVATE = 1, 2, 3
    PRIVACY_CHOICES = [(PRIVATE, "Private"), (SHAREABLE, "Shared"),
                       (PUBLIC, "Public")]

    # Rank in a project list.
    rank = models.FloatField(default=100)

    # The user that edited the object most recently.
    lastedit_user = models.ForeignKey(User,
                                      related_name='proj_editor',
                                      null=True,
                                      on_delete=models.CASCADE)
    lastedit_date = models.DateTimeField(default=timezone.now)

    # Limits who can access the project.
    privacy = models.IntegerField(default=PRIVATE, choices=PRIVACY_CHOICES)
    image = models.ImageField(default=None,
                              blank=True,
                              upload_to=image_path,
                              max_length=MAX_FIELD_LEN)
    name = models.CharField(default="New Project", max_length=MAX_NAME_LEN)
    deleted = models.BooleanField(default=False)

    # We need to keep the owner.
    owner = models.ForeignKey(User, null=False, on_delete=models.CASCADE)
    text = models.TextField(default='Project description.',
                            max_length=MAX_TEXT_LEN)

    html = models.TextField(default='html', max_length=MAX_LOG_LEN)
    date = models.DateTimeField(auto_now_add=True)
    # Internal uid that is not editable.
    uid = models.CharField(max_length=32, unique=True)

    # FilePathField points to an existing project directory.
    dir = models.FilePathField(max_length=MAX_FIELD_LEN, default='')

    sharable_token = models.CharField(max_length=32, null=True, unique=True)

    data_count = models.IntegerField(default=0, null=True, db_index=True)
    recipes_count = models.IntegerField(default=0, null=True, db_index=True)
    jobs_count = models.IntegerField(default=0, null=True, db_index=True)

    objects = Manager()

    def save(self, *args, **kwargs):
        now = timezone.now()
        self.date = self.date or now
        self.sharable_token = self.sharable_token or util.get_uuid(30)
        self.html = make_html(self.text, user=self.lastedit_user)
        self.name = self.name[:MAX_NAME_LEN]
        self.uid = self.uid or util.get_uuid(8)
        self.lastedit_user = self.lastedit_user or self.owner
        self.lastedit_date = now

        super(Project, self).save(*args, **kwargs)

    def __str__(self):
        return self.name

    def uid_is_set(self):
        assert bool(
            self.uid.strip()), "Sanity check. UID should always be set."

    def url(self):
        self.uid_is_set()
        return reverse("project_view", kwargs=dict(uid=self.uid))

    def get_project_dir(self):
        return self.dir

    def get_data_dir(self):
        "Match consistency of data dir calls"
        return self.get_project_dir()

    def set_counts(self, save=True):
        """
        Set the data, recipe, and job count for this project
        """

        # Set the counts on current instance and also update in database.
        self.recipes_count = self.analysis_set.filter(project=self,
                                                      deleted=False).count()
        self.data_count = self.data_set.filter(deleted=False).count()
        self.job_count = self.job_set.filter(deleted=False).count()

        Project.objects.filter(id=self.id).update(
            data_count=self.data_count,
            recipes_count=self.recipes_count,
            jobs_count=self.job_count)

    @property
    def is_public(self):
        return self.privacy == self.PUBLIC

    @property
    def is_private(self):
        return self.privacy == self.PRIVATE

    @property
    def project(self):
        return self

    @property
    def json_text(self):
        return hjson.dumps(self.json_data)

    @property
    def api_data(self):
        img = self.image
        strimg = img_to_str(img=img)

        json_data = dict(
            uid=self.uid,
            name=self.name,
            privacy=dict(self.PRIVACY_CHOICES)[self.privacy],
            text=self.text,
            url=settings.BASE_URL,
            project_uid=self.uid,
            id=self.pk,
            image=strimg,
            # Insert recipes API data in there as well.
            recipes=[r.api_data for r in self.analysis_set.all()])

        return json_data

    @property
    def summary(self):
        """
        Returns first line of text
        """
        lines = self.text.splitlines() or ['']
        first = lines[0]
        return first

    @property
    def delete_url(self):
        return reverse('project_delete', kwargs=dict(uid=self.uid))

    @property
    def is_shareable(self):
        return self.privacy == self.SHAREABLE

    def get_sharable_link(self):

        # Return a sharable link if the project is shareable
        if self.is_shareable:
            return reverse('project_share',
                           kwargs=dict(token=self.sharable_token))

        return '/'

    def get_name(self):
        if self.deleted:
            return f'Deleted Project: {self.name}'
        return self.name
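
A minimal usage sketch for the Project model above; the owner lookup is a hypothetical example and assumes at least one user exists.

from django.contrib.auth import get_user_model

owner = get_user_model().objects.first()          # placeholder owner
project = Project.objects.create(owner=owner,
                                 name="Demo project",
                                 privacy=Project.SHAREABLE)
project.set_counts()                              # refresh the cached counts
print(project.is_shareable, project.get_sharable_link())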
Example #7
class NIfTI(TimeStampedModel):
    """
    A model representing a NIfTI_ file in the database.

    .. _NIfTI: https://nifti.nimh.nih.gov/nifti-1/
    """

    #: Path of the *.nii* file within the application's media directory.
    path = models.FilePathField(max_length=1000, unique=True)

    #: Whether the created instance is the product of a direct conversion from
    #: some raw format to NIfTI or of a manipulation of the data.
    is_raw = models.BooleanField(default=False)

    APPENDIX_FILES: Iterable[str] = {".json", ".bval", ".bvec"}

    # Used to cache JSON data to prevent multiple reads.
    _json_data = None

    # Logger instance for this model.
    _logger = logging.getLogger("data.mri.nifti")

    class Meta:
        verbose_name = "NIfTI"
        ordering = ("-id", )

    def get_data(self, dtype: np.dtype = np.float64) -> np.ndarray:
        """
        Uses NiBabel_ to return the underlying pixel data as a NumPy_ array.

        .. _NiBabel: https://nipy.org/nibabel/
        .. _NumPy: http://www.numpy.org/

        Returns
        -------
        np.ndarray
            Pixel data.
        """

        return nib.load(str(self.path)).get_fdata(dtype=dtype)

    def get_b_value(self) -> List[int]:
        """
        Returns the degree of diffusion weighting applied (b-value_) for each
        diffusion direction. This method relies on dcm2niix_'s default
        configuration in which when diffusion-weighted images (DWI_) are
        converted, another file with the same name and a "bval" extension is
        created alongside.

        .. _b-value: https://radiopaedia.org/articles/b-values-1
        .. _dcm2niix: https://github.com/rordenlab/dcm2niix
        .. _DWI: https://en.wikipedia.org/wiki/Diffusion_MRI

        Hint
        ----
        For more information, see dcm2niix's `Diffusion Tensor Imaging`_
        section of the user guide.

        .. _Diffusion Tensor Imaging:
           https://www.nitrc.org/plugins/mwiki/index.php/dcm2nii:MainPage#Diffusion_Tensor_Imaging

        See Also
        --------
        * :attr:`b_value`

        Returns
        -------
        List[int]
            b-value for each diffusion direction.
        """
        file_name = self.b_value_file
        if file_name:
            with open(file_name, "r") as file_object:
                content = file_object.read()
            content = content.splitlines()[0].split(" ")
            return [int(value) for value in content]
        return None

    def get_b_vector(self) -> List[List[float]]:
        """
        Returns the b-vectors_ representing the diffusion weighting gradient
        scheme. This method relies on dcm2niix_'s default configuration in
        which when diffusion-weighted images (DWI_) are converted, another file
        with the same name and a "bvec" extension is created alongside.

        .. _b-vectors:
           https://mrtrix.readthedocs.io/en/latest/concepts/dw_scheme.html
        .. _dcm2niix: https://github.com/rordenlab/dcm2niix
        .. _DWI: https://en.wikipedia.org/wiki/Diffusion_MRI

        Hint
        ----
        For more information, see dcm2niix's `Diffusion Tensor Imaging`_
        section of the user guide.

        .. _Diffusion Tensor Imaging:
           https://www.nitrc.org/plugins/mwiki/index.php/dcm2nii:MainPage#Diffusion_Tensor_Imaging

        See Also
        --------
        * :attr:`b_vector`

        Returns
        -------
        List[List[float]]
            b-value for each diffusion direction
        """
        file_name = self.b_vector_file
        if file_name:
            with open(file_name, "r") as file_object:
                content = file_object.read()
            return [[float(value) for value in vector.rstrip().split(" ")]
                    for vector in content.rstrip().split("\n")]

    def read_json(self) -> dict:
        """
        Returns the JSON data generated alongside *.nii* files converted
        using dcm2niix_'s *"BIDS sidecar"* option.

        .. _dcm2niix: https://github.com/rordenlab/dcm2niix

        Notes
        -----
        * For more information about dcm2niix and the BIDS sidecar, see
          dcm2niix's `general usage manual`_.
        * For more information about the extracted properties and their usage
          see `Acquiring and Using Field-maps`_

        .. _Acquiring and Using Field-maps:
           https://lcni.uoregon.edu/kb-articles/kb-0003
        .. _general usage manual:
            https://www.nitrc.org/plugins/mwiki/index.php/dcm2nii:MainPage#General_Usage

        Returns
        -------
        dict
            BIDS sidecar information stored in a JSON file, or *{}* if the file
            doesn't exist
        """
        if self.json_file.is_file():
            with open(self.json_file, "r") as f:
                return json.load(f)
        return {}

    def get_total_readout_time(self) -> float:
        """
        Reads the total readout time extracted by dcm2niix_ upon conversion.

        .. _dcm2niix: https://github.com/rordenlab/dcm2niix

        Hint
        ----
        Total readout time is defined as the time from the center of the first
        echo to the center of the last (in seconds).

        Returns
        -------
        float
            Total readout time
        """
        return self.json_data.get("TotalReadoutTime")

    def get_effective_spacing(self) -> float:
        """
        Reads the effective echo spacing value extracted by dcm2niix_ upon
        conversion.

        .. _dcm2niix: https://github.com/rordenlab/dcm2niix

        Returns
        -------
        float
            Effective echo spacing
        """
        return self.json_data.get("EffectiveEchoSpacing")

    def get_phase_encoding_direction(self) -> float:
        """
        Reads the phase encoding direction value extracted by dcm2niix_ upon
        conversion.

        .. _dcm2niix: https://github.com/rordenlab/dcm2niix

        Returns
        -------
        float
            Phase encoding direction
        """
        return self.json_data.get("PhaseEncodingDirection")

    def compress(self, keep_source: bool = False) -> Path:
        """
        Compress the associated *.nii* using gzip, if it isn't already
        compressed.

        Parameters
        ----------
        keep_source : bool, optional
            Whether to keep a copy of the uncompressed file, by default False

        Returns
        -------
        Path
            Path of the compressed (*.nii.gz*) file
        """
        if not self.is_compressed:
            uncompressed_path = Path(self.path)
            compressed_path = compress(uncompressed_path,
                                       keep_source=keep_source)
            self.path = str(compressed_path)
            self.save()
        return Path(self.path)

    def uncompress(self, keep_source: bool = False) -> Path:
        """
        Uncompress the associated *.nii* using gzip, if it isn't already
        uncompressed.

        Parameters
        ----------
        keep_source : bool, optional
            Whether to keep a copy of the compressed file, by default False

        Returns
        -------
        Path
            Path of the uncompressed (*.nii*) file
        """
        if self.is_compressed:
            compressed_path = Path(self.path)
            uncompressed_path = uncompress(compressed_path,
                                           keep_source=keep_source)
            self.path = str(uncompressed_path)
            self.save()
        return Path(self.path)

    def _resolve_compression_state(self) -> None:
        """
        Fixes the instance's path in case it is out of sync with the file's
        compression state. This method is used for testing and should not be
        required under normal circumstances.

        Raises
        ------
        FileNotFoundError
            No associated file found in the file system
        """
        path = Path(self.path)
        is_compressed = path.suffix == ".gz"
        compressed_path = path if is_compressed else path.with_suffix(".gz")
        uncompressed_path = path if not is_compressed else path.with_suffix("")
        valid_compressed = is_compressed and compressed_path.exists()
        valid_uncompressed = uncompressed_path.exists() and not is_compressed
        if not valid_compressed and uncompressed_path.exists():
            self.path = str(path.with_suffix(""))
            self.save()
        elif not valid_uncompressed and compressed_path.exists():
            self.path = str(path.with_suffix(".gz"))
            self.save()
        elif valid_compressed or valid_uncompressed:
            return
        else:
            message = NIFTI_FILE_MISSING.format(pk=self.id, path=self.path)
            raise FileNotFoundError(message)

    def rename(self,
               destination: Union[Path, str],
               log_level: int = logging.DEBUG):
        source = Path(self.path)
        destination = Path(destination)
        self._logger.log(log_level, f"Moving NIfTI #{self.id}...")
        self._logger.log(log_level, f"Source:\t{source}")
        self._logger.log(log_level, f"Destination:\t{destination}")
        destination.parent.mkdir(parents=True, exist_ok=True)
        source.rename(destination)
        source_base_name = source.name.split(".")[0]
        destination_base_name = destination.name.split(".")[0]
        for possible_appendix in self.APPENDIX_FILES:
            appendix = (source.parent /
                        source_base_name).with_suffix(possible_appendix)
            if appendix.exists():
                self._logger.log(log_level,
                                 f"Moving {possible_appendix} appendix...")
                appendix_destination = (
                    destination.parent /
                    destination_base_name).with_suffix(possible_appendix)
                appendix.rename(appendix_destination)
                self._logger.log(
                    log_level,
                    f"Appended {possible_appendix} moved to {appendix_destination}.",  # noqa: E501
                )
        if self.is_raw and self.scan:
            self._logger.log(log_level,
                             f"Found associated scan (#{self.scan.id}).")
            self._logger.log(log_level,
                             "Querying scan's input set for changed runs...")
            for input_instance in self.scan.input_set.all():
                is_file_input = isinstance(input_instance, FileInput)
                is_list_input = isinstance(input_instance, ListInput)
                if is_file_input and input_instance.value == str(source):
                    self._logger.log(
                        log_level,
                        f"Found changed file input instance:\n{input_instance}",  # noqa: E501
                    )
                    self._logger.log(log_level, "Updating file input value...")
                    input_instance.value = str(destination)
                    input_instance.save()
                    self._logger.log(log_level, "done!")
                elif is_list_input and str(source) in input_instance.value:
                    self._logger.log(
                        log_level,
                        f"Found changed list input instance:\n{input_instance}",  # noqa: E501
                    )
                    self._logger.log(log_level, "Updating list input value...")
                    input_instance.value = [
                        path if path != str(source) else str(destination)
                        for path in input_instance.value
                    ]
                    input_instance.save()
                    self._logger.log(log_level, "done!")
        self.path = str(destination)
        self._logger.log(log_level,
                         f"NIfTI {self.id} file successfully moved.")
        self.save()

    @property
    def json_file(self) -> Path:
        """
        Return the path of the corresponding JSON sidecar file.

        Returns
        -------
        Path
            Corresponding JSON file path
        """
        base_name = Path(self.path).name.split(".")[0]
        return (Path(self.path).parent / base_name).with_suffix(".json")

    @property
    def json_data(self) -> dict:
        """
        Reads the BIDS sidecar information and caches it in an instance
        attribute to prevent repeated reads.

        See Also
        --------
        * :meth:`read_json`

        Returns
        -------
        dict
            "BIDS sidecar" JSON data
        """
        if self._json_data is None:
            self._json_data = self.read_json()
        return self._json_data

    @property
    def b_value_file(self) -> Path:
        """
        Return the FSL-format b-value file path, if one exists.

        Returns
        -------
        Path
            FSL format b-value file path
        """
        p = Path(self.path)
        bval_file = p.parent / Path(p.stem).with_suffix(".bval")
        if bval_file.is_file():
            return bval_file

    @property
    def b_vector_file(self) -> Path:
        """
        Return the FSL-format b-vector file path, if one exists.

        Returns
        -------
        Path
            FSL format b-vector file path
        """
        p = Path(self.path)
        bvec_file = p.parent / Path(p.stem).with_suffix(".bvec")
        if bvec_file.is_file():
            return bvec_file

    @property
    def b_value(self) -> List[int]:
        """
        Returns the B-value of DWI scans as calculated by dcm2niix_.

        .. _dcm2niix: https://github.com/rordenlab/dcm2niix

        See Also
        --------
        * :meth:`get_b_value`

        Returns
        -------
        List[int]
            B-value
        """
        return self.get_b_value()

    @property
    def b_vector(self) -> List[List[float]]:
        """
        Returns the B-vector of DWI scans as calculated by dcm2niix_.

        .. _dcm2niix: https://github.com/rordenlab/dcm2niix

        See Also
        --------
        * :meth:`get_b_vector`

        Returns
        -------
        List[List[float]]
            B-vector
        """
        return self.get_b_vector()

    @property
    def is_compressed(self) -> bool:
        """
        Whether the associated *.nii* file is compressed with gzip or not.

        Returns
        -------
        bool
            Associated *.nii* file gzip compression state
        """

        return Path(self.path).suffix == ".gz"

    @property
    def compressed(self) -> Path:
        """
        Compresses the associated *.nii* file using gzip if it is not already
        compressed and returns its path.

        Returns
        -------
        Path
            Compressed *.nii.gz* file associated with this instance
        """
        return self.compress()

    @property
    def uncompressed(self) -> Path:
        """
        Uncompresses the associated *.nii.gz* file if it is compressed and
        returns the resulting *.nii* path.

        Returns
        -------
        Path
            Uncompressed *.nii* file associated with this instance
        """
        return self.uncompress()
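
The json_file property and the appendix handling in rename() rely on the same convention: every suffix is stripped from the file name (so *.nii* and *.nii.gz* reduce to the same base name) before a new extension is attached. A standalone sketch of that convention, using made-up file names:

from pathlib import Path

def sidecar_path(nifti_path: str) -> Path:
    """Mirror the suffix-stripping rule used by json_file above."""
    path = Path(nifti_path)
    base_name = path.name.split(".")[0]  # drops ".nii" as well as ".nii.gz"
    return (path.parent / base_name).with_suffix(".json")

assert sidecar_path("/data/sub-01_T1w.nii.gz").name == "sub-01_T1w.json"
assert sidecar_path("/data/sub-01_T1w.nii").name == "sub-01_T1w.json"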
Example No. 8
class FilePathPKData(models.Model):
    data = models.FilePathField(primary_key=True)
Example No. 9
class FilePathData(models.Model):
    data = models.FilePathField(null=True)
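
A note on the two minimal models above: a FilePathField is stored as a plain character column, so at the ORM level the value is just a path string; restricting the choices to files under a given directory happens in the form field, not in the database. A minimal usage sketch, assuming the FilePathData model is part of an installed app with migrations applied:

obj = FilePathData.objects.create(data="/tmp/report.txt")  # example path
assert isinstance(obj.data, str)  # stored and returned as a plain string

# null=True makes the underlying column nullable as well:
empty = FilePathData.objects.create(data=None)
assert empty.data is None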
Example No. 10
class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
    ]

    operations = [
        migrations.CreateModel(
            name='Mail',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('subject',
                 models.CharField(help_text='E-Mail Betreff', max_length=255)),
                ('generator',
                 models.CharField(
                     default='notification.generator.generic.BasicGenerator',
                     max_length=255)),
                ('reason', models.SlugField(max_length=100, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name='MailLogLine',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('timestamp', models.TimeField()),
                ('type', models.CharField(max_length=255)),
                ('payload', models.TextField()),
                ('insert', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        migrations.CreateModel(
            name='MailLogSession',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('uuid', models.UUIDField(editable=False)),
                ('reference',
                 models.CharField(blank=True, max_length=255, null=True)),
                ('recipient_email', models.EmailField(max_length=255)),
                ('sender_email', models.EmailField(max_length=255)),
                ('context', models.TextField()),
                ('email', models.TextField()),
                ('send_at', models.DateTimeField(auto_now=True)),
                ('content_type',
                 models.ForeignKey(
                     blank=True,
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='contenttypes.ContentType')),
            ],
        ),
        migrations.CreateModel(
            name='MailTemplate',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('html_file',
                 models.FilePathField(max_length=500,
                                      path='mailsystem/templates/mailsystem/',
                                      recursive=True)),
                ('alternative_file',
                 models.FilePathField(max_length=500,
                                      path='mailsystem/templates/mailsystem/',
                                      recursive=True)),
            ],
        ),
        migrations.CreateModel(
            name='MailTemplateVariable',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name',
                 models.CharField(help_text='Variablen Name', max_length=255)),
                ('default', models.TextField(default='Undefined Value')),
                ('template',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to='mailsystem.MailTemplate')),
            ],
        ),
        migrations.CreateModel(
            name='MailVariable',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('content', models.TextField()),
                ('mail',
                 models.ForeignKey(
                     on_delete=django.db.models.deletion.DO_NOTHING,
                     to='mailsystem.Mail')),
                ('mail_template_variable',
                 models.ForeignKey(
                     on_delete=django.db.models.deletion.DO_NOTHING,
                     to='mailsystem.MailTemplateVariable')),
            ],
        ),
        migrations.AddField(
            model_name='maillogline',
            name='protocol',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                to='mailsystem.MailLogSession'),
        ),
        migrations.AddField(
            model_name='mail',
            name='template',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.DO_NOTHING,
                to='mailsystem.MailTemplate'),
        ),
    ]
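
In the MailTemplate model above, html_file and alternative_file point FilePathField at mailsystem/templates/mailsystem/ with recursive=True, so the corresponding form field offers every file found under that directory tree as a choice when the field is built. A minimal sketch of that behaviour with the standalone form field (the directory must exist for the choices to be populated, and the match pattern is a hypothetical extra filter not used in the migration itself):

from django import forms

html_file = forms.FilePathField(
    path="mailsystem/templates/mailsystem/",  # directory taken from the migration above
    recursive=True,
    match=r".*\.html$",  # hypothetical extra filter
)
print([value for value, label in html_file.choices])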
Example No. 11
class TranslationProject(models.Model, CachedTreeItem):

    language = models.ForeignKey(Language,
                                 db_index=True,
                                 on_delete=models.CASCADE)
    project = models.ForeignKey(Project,
                                db_index=True,
                                on_delete=models.CASCADE)
    real_path = models.FilePathField(editable=False, null=True, blank=True)
    directory = models.OneToOneField(Directory,
                                     db_index=True,
                                     editable=False,
                                     on_delete=models.CASCADE)
    pootle_path = models.CharField(max_length=255,
                                   null=False,
                                   unique=True,
                                   db_index=True,
                                   editable=False)
    creation_time = models.DateTimeField(auto_now_add=True,
                                         db_index=True,
                                         editable=False,
                                         null=True)
    revisions = GenericRelation(Revision)

    objects = TranslationProjectManager()

    class Meta(object):
        unique_together = (('language', 'project'), ('project', 'language'))
        db_table = 'pootle_app_translationproject'
        # disabled objects are hidden for related objects too
        base_manager_name = 'objects'

    @cached_property
    def code(self):
        return u'-'.join([self.language.code, self.project.code])

    @cached_property
    def data_tool(self):
        return data_tool.get(self.__class__)(self)

    # # # # # # # # # # # # # #  Properties # # # # # # # # # # # # # # # # # #

    @property
    def name(self):
        # TODO: See if `self.fullname` can be removed
        return self.fullname

    @property
    def fullname(self):
        return "%s [%s]" % (self.project.fullname, self.language.name)

    @property
    def abs_real_path(self):
        if self.real_path is not None:
            return absolute_real_path(self.real_path)

    @abs_real_path.setter
    def abs_real_path(self, value):
        if value is not None:
            self.real_path = relative_real_path(value)
        else:
            self.real_path = None

    @property
    def file_style(self):
        return self.project.get_treestyle()

    @property
    def checker(self):
        from translate.filters import checks
        checkerclasses = [
            checks.projectcheckers.get(self.project.checkstyle,
                                       checks.StandardChecker)
        ]
        return checks.TeeChecker(checkerclasses=checkerclasses,
                                 excludefilters=EXCLUDED_FILTERS,
                                 errorhandler=self.filtererrorhandler,
                                 languagecode=self.language.code)

    @property
    def disabled(self):
        return self.project.disabled

    @cached_property
    def templates_tp(self):
        return self.project.get_template_translationproject()

    @property
    def is_template_project(self):
        return self == self.templates_tp

    # # # # # # # # # # # # # #  Methods # # # # # # # # # # # # # # # # # # #

    def __unicode__(self):
        return self.pootle_path

    def __init__(self, *args, **kwargs):
        super(TranslationProject, self).__init__(*args, **kwargs)

    def save(self, *args, **kwargs):
        self.directory = self.language.directory \
                                      .get_or_make_subdir(self.project.code)
        self.pootle_path = self.directory.pootle_path

        if self.project.treestyle != 'pootle_fs':
            from pootle_app.project_tree import get_translation_project_dir
            self.abs_real_path = get_translation_project_dir(
                self.language,
                self.project,
                self.file_style,
                make_dirs=not self.directory.obsolete)
        else:
            self.abs_real_path = None
        super(TranslationProject, self).save(*args, **kwargs)
        if self.directory.tp_id != self.pk:
            self.directory.tp = self
            self.directory.save()

    def delete(self, *args, **kwargs):
        directory = self.directory

        super(TranslationProject, self).delete(*args, **kwargs)
        directory.delete()

    def get_absolute_url(self):
        return reverse('pootle-tp-browse',
                       args=split_pootle_path(self.pootle_path)[:-1])

    def get_translate_url(self, **kwargs):
        return u''.join([
            reverse("pootle-tp-translate",
                    args=split_pootle_path(self.pootle_path)[:-1]),
            get_editor_filter(**kwargs)
        ])

    def get_announcement(self, user=None):
        """Return the related announcement, if any."""
        return StaticPage.get_announcement_for(self.pootle_path, user)

    def filtererrorhandler(self, functionname, str1, str2, e):
        logging.error(u"Error in filter %s: %r, %r, %s", functionname, str1,
                      str2, e)
        return False

    def is_accessible_by(self, user):
        """Returns `True` if the current translation project is accessible
        by `user`.
        """
        if user.is_superuser:
            return True

        return self.project.code in Project.accessible_by_user(user)

    def can_be_inited_from_templates(self):
        """Returns `True` if the current translation project hasn't been
        saved yet and can be initialized from templates.
        """

        # This method checks if the current translation project directory
        # doesn't exist. So it won't work if the translation project is already
        # saved in the database because the translation project directory is
        # auto-created in `save()` method.
        return (not self.is_template_project and self.templates_tp is not None
                and not translation_project_dir_exists(self.language,
                                                       self.project))

    def init_from_templates(self):
        """Initializes the current translation project files using
        the templates TP ones.
        """
        template_stores = self.templates_tp.stores.live().select_related(
            "filetype__template_extension",
            "filetype__extension").exclude(file="")

        for template_store in template_stores.iterator():
            init_store_from_template(self, template_store)

        self.update_from_disk()

    def update_from_disk(self, force=False, overwrite=False):
        with update_tp_after(self):
            self._update_from_disk(force=force, overwrite=overwrite)

    def _update_from_disk(self, force=False, overwrite=False):
        """Update all stores to reflect state on disk."""
        changed = []

        logging.info(u"Scanning for new files in %s", self)
        # Create new, make obsolete in-DB stores to reflect state on disk
        self.scan_files()

        stores = self.stores.live().select_related(
            "parent", "data", "filetype__extension",
            "filetype__template_extension").exclude(file='')
        # Update store content from disk store
        for store in stores.iterator():
            if not store.file:
                continue
            disk_mtime = store.get_file_mtime()
            if not force and disk_mtime == store.file_mtime:
                # The file on disk wasn't changed since the last sync
                logging.debug(
                    u"File didn't change since last sync, "
                    u"skipping %s", store.pootle_path)
                continue
            if store.updater.update_from_disk(overwrite=overwrite):
                changed.append(store)
        return changed

    def sync(self, conservative=True, skip_missing=False, only_newer=True):
        """Sync unsaved work on all stores to disk"""
        stores = self.stores.live().exclude(file='').filter(state__gte=PARSED)
        for store in stores.select_related("parent").iterator():
            store.sync(update_structure=not conservative,
                       conservative=conservative,
                       skip_missing=skip_missing,
                       only_newer=only_newer)

    # # # TreeItem
    def get_children(self):
        return self.directory.children

    def get_parents(self):
        return [self.project]

    # # # /TreeItem

    def directory_exists_on_disk(self):
        """Checks if the actual directory for the translation project
        exists on disk.
        """
        return not does_not_exist(self.abs_real_path)

    def scan_files(self):
        """Scans the file system and returns a list of translation files.
        """
        projects = [p.strip() for p in self.project.ignoredfiles.split(',')]
        ignored_files = set(projects)

        filetypes = self.project.filetype_tool
        exts = filetypes.filetype_extensions

        # Scan for pots if template project
        if self.is_template_project:
            exts = filetypes.template_extensions

        from pootle_app.project_tree import (add_files,
                                             match_template_filename,
                                             direct_language_match_filename)

        all_files = []
        new_files = []

        if self.file_style == 'gnu':
            if self.pootle_path.startswith('/templates/'):
                file_filter = lambda filename: match_template_filename(
                    self.project,
                    filename,
                )
            else:
                file_filter = lambda filename: direct_language_match_filename(
                    self.language.code,
                    filename,
                )
        else:
            file_filter = lambda filename: True

        all_files, new_files, __ = add_files(
            self,
            ignored_files,
            exts,
            self.real_path,
            self.directory,
            file_filter,
        )

        return all_files, new_files
Example No. 12
class Model(models.Model):
    field = models.FilePathField(allow_files=False,
                                 allow_folders=False)
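
As written, the field above disables both allow_files and allow_folders; Django's system checks require at least one of the two to be True, so this definition fails validation. For reference, a folder-only configuration that does pass might look like the following sketch (the path value is a placeholder):

from django.db import models

class FolderChoice(models.Model):
    # At least one of allow_files / allow_folders must be True.
    folder = models.FilePathField(
        path="/var/data",  # placeholder directory
        allow_files=False,
        allow_folders=True,
    )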
Example No. 13
class TranslationProject(models.Model, CachedTreeItem):

    language = models.ForeignKey(Language,
                                 db_index=True,
                                 on_delete=models.CASCADE)
    project = models.ForeignKey(Project,
                                db_index=True,
                                on_delete=models.CASCADE)
    real_path = models.FilePathField(editable=False, null=True, blank=True)
    directory = models.OneToOneField(Directory,
                                     db_index=True,
                                     editable=False,
                                     on_delete=models.CASCADE)
    pootle_path = models.CharField(max_length=255,
                                   null=False,
                                   unique=True,
                                   db_index=True,
                                   editable=False)
    creation_time = models.DateTimeField(auto_now_add=True,
                                         db_index=True,
                                         editable=False,
                                         null=True)

    _non_db_state_cache = LRUCachingDict(PARSE_POOL_SIZE,
                                         PARSE_POOL_CULL_FREQUENCY)

    objects = TranslationProjectManager()

    class Meta(object):
        unique_together = ("language", "project")
        db_table = "pootle_app_translationproject"
        base_manager_name = "objects"

    @cached_property
    def code(self):
        return u"-".join([self.language.code, self.project.code])

    # # # # # # # # # # # # # #  Properties # # # # # # # # # # # # # # # # # #

    @property
    def name(self):
        # TODO: See if `self.fullname` can be removed
        return self.fullname

    @property
    def fullname(self):
        return "%s [%s]" % (self.project.fullname, self.language.name)

    @property
    def abs_real_path(self):
        if self.real_path is not None:
            return absolute_real_path(self.real_path)

    @abs_real_path.setter
    def abs_real_path(self, value):
        if value is not None:
            self.real_path = relative_real_path(value)
        else:
            self.real_path = None

    @property
    def checker(self):
        from translate.filters import checks

        # We do not use default Translate Toolkit checkers; instead use
        # our own one
        if settings.ZING_QUALITY_CHECKER:
            from pootle_misc.util import import_func

            checkerclasses = [import_func(settings.ZING_QUALITY_CHECKER)]
        else:
            checkerclasses = [
                checks.projectcheckers.get(self.project.checkstyle,
                                           checks.StandardChecker)
            ]

        return checks.TeeChecker(
            checkerclasses=checkerclasses,
            excludefilters=excluded_filters,
            errorhandler=self.filtererrorhandler,
            languagecode=self.language.code,
        )

    @property
    def non_db_state(self):
        if not hasattr(self, "_non_db_state"):
            try:
                self._non_db_state = self._non_db_state_cache[self.id]
            except KeyError:
                self._non_db_state = TranslationProjectNonDBState(self)
                self._non_db_state_cache[
                    self.id] = TranslationProjectNonDBState(self)

        return self._non_db_state

    @property
    def disabled(self):
        return self.project.disabled

    @property
    def is_terminology_project(self):
        return self.project.checkstyle == "terminology"

    # # # # # # # # # # # # # #  Methods # # # # # # # # # # # # # # # # # # #

    def __str__(self):
        return self.pootle_path

    def __init__(self, *args, **kwargs):
        super(TranslationProject, self).__init__(*args, **kwargs)

    def save(self, *args, **kwargs):
        self.directory = self.language.directory.get_or_make_subdir(
            self.project.code)
        self.pootle_path = self.directory.pootle_path

        self.abs_real_path = get_translation_project_dir(
            self.language,
            self.project.get_real_path(),
            make_dirs=not self.directory.obsolete,
        )
        super(TranslationProject, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        directory = self.directory

        super(TranslationProject, self).delete(*args, **kwargs)
        directory.delete()

    def get_absolute_url(self):
        return reverse("pootle-tp-browse",
                       args=split_pootle_path(self.pootle_path)[:-1])

    def get_translate_url(self, **kwargs):
        return u"".join([
            reverse("pootle-tp-translate",
                    args=split_pootle_path(self.pootle_path)[:-1]),
            get_editor_filter(**kwargs),
        ])

    def filtererrorhandler(self, functionname, str1, str2, e):
        logging.error(u"Error in filter %s: %r, %r, %s", functionname, str1,
                      str2, e)
        return False

    def is_accessible_by(self, user):
        """Returns `True` if the current translation project is accessible
        by `user`.
        """
        if user.is_superuser:
            return True

        return self.project.code in Project.accessible_by_user(user)

    def update_from_disk(self, force=False, overwrite=False):
        """Update all stores to reflect state on disk.

        :return: `True` if any of the existing stores were updated.
            FIXME note: `scan_files()` doesn't report whether something
            changed or not, but it can obsolete dirs/stores. Hence if that
            happened the return value will be `False`, which is misleading.
        """
        changed = False

        logging.info(u"Scanning for new files in %s", self)
        # Create new, make obsolete in-DB stores to reflect state on disk
        self.scan_files()

        stores = self.stores.live().select_related("parent").exclude(file="")
        # Update store content from disk store
        for store in stores.iterator():
            changed = (store.updater.update_from_disk(force=force,
                                                      overwrite=overwrite)
                       or changed)

        # If this TP has no stores, cache should be updated forcibly.
        if not changed and stores.count() == 0:
            self.update_all_cache()

        return changed

    def sync(self, conservative=True, skip_missing=False, only_newer=True):
        """Sync unsaved work on all stores to disk"""
        stores = self.stores.live().exclude(file="").filter(state__gte=PARSED)
        for store in stores.select_related("parent").iterator():
            store.sync(
                update_structure=not conservative,
                conservative=conservative,
                skip_missing=skip_missing,
                only_newer=only_newer,
            )

    # # # TreeItem
    def get_children(self):
        return self.directory.children

    def get_parent(self):
        return self.project

    # # # /TreeItem

    def directory_exists_on_disk(self):
        """Checks if the actual directory for the translation project
        exists on disk.
        """
        return not does_not_exist(self.abs_real_path)

    def scan_files(self):
        """Scans the file system and returns a list of translation files.
        """
        from pootle_app.project_tree import add_files

        all_files = []
        new_files = []

        all_files, new_files, __ = add_files(
            self,
            self.real_path,
            self.directory,
        )

        return all_files, new_files

    ###########################################################################

    def gettermmatcher(self):
        """Returns the terminology matcher."""
        terminology_stores = Store.objects.none()
        mtime = None

        if not self.is_terminology_project:
            # Get global terminology first
            try:
                termproject = TranslationProject.objects.get_terminology_project(
                    self.language_id)
                mtime = termproject.get_cached_value(CachedMethods.MTIME)
                terminology_stores = termproject.stores.live()
            except TranslationProject.DoesNotExist:
                pass

        if mtime is None:
            return

        if mtime != self.non_db_state.termmatchermtime:
            from pootle_misc.match import Matcher

            self.non_db_state.termmatcher = Matcher(
                terminology_stores.iterator())
            self.non_db_state.termmatchermtime = mtime

        return self.non_db_state.termmatcher
Example No. 14
class Directory(models.Model):
    path = models.FilePathField(unique=True)

    def __str__(self):
        return self.path
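
Because the path field above is declared with unique=True, uniqueness is enforced by a database constraint rather than by any filesystem check. A short usage sketch, assuming migrations for the Directory model have been applied (the path itself is made up):

from django.db import IntegrityError, transaction

d = Directory.objects.create(path="/srv/media/photos")
assert str(d) == "/srv/media/photos"  # __str__ returns the stored path

try:
    with transaction.atomic():
        Directory.objects.create(path="/srv/media/photos")  # duplicate path
except IntegrityError:
    pass  # the unique constraint rejects the second row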
Example No. 15
class TranslationProject(models.Model, CachedTreeItem):

    language = models.ForeignKey(Language, db_index=True)
    project = models.ForeignKey(Project, db_index=True)
    real_path = models.FilePathField(editable=False, null=True, blank=True)
    directory = models.OneToOneField(Directory, db_index=True, editable=False)
    pootle_path = models.CharField(max_length=255,
                                   null=False,
                                   unique=True,
                                   db_index=True,
                                   editable=False)
    creation_time = models.DateTimeField(auto_now_add=True,
                                         db_index=True,
                                         editable=False,
                                         null=True)

    _non_db_state_cache = LRUCachingDict(settings.PARSE_POOL_SIZE,
                                         settings.PARSE_POOL_CULL_FREQUENCY)

    objects = TranslationProjectManager()

    class Meta(object):
        unique_together = (('language', 'project'), ('project', 'language'))
        db_table = 'pootle_app_translationproject'

    @cached_property
    def code(self):
        return u'-'.join([self.language.code, self.project.code])

    @cached_property
    def data_tool(self):
        return data_tool.get(self.__class__)(self)

    # # # # # # # # # # # # # #  Properties # # # # # # # # # # # # # # # # # #

    @property
    def name(self):
        # TODO: See if `self.fullname` can be removed
        return self.fullname

    @property
    def fullname(self):
        return "%s [%s]" % (self.project.fullname, self.language.name)

    @property
    def abs_real_path(self):
        if self.real_path is not None:
            return absolute_real_path(self.real_path)

    @abs_real_path.setter
    def abs_real_path(self, value):
        if value is not None:
            self.real_path = relative_real_path(value)
        else:
            self.real_path = None

    @property
    def file_style(self):
        return self.project.get_treestyle()

    @property
    def checker(self):
        from translate.filters import checks
        # We do not use default Translate Toolkit checkers; instead use
        # our own one
        if settings.POOTLE_QUALITY_CHECKER:
            from pootle_misc.util import import_func
            checkerclasses = [import_func(settings.POOTLE_QUALITY_CHECKER)]
        else:
            checkerclasses = [
                checks.projectcheckers.get(self.project.checkstyle,
                                           checks.StandardChecker)
            ]

        return checks.TeeChecker(checkerclasses=checkerclasses,
                                 excludefilters=excluded_filters,
                                 errorhandler=self.filtererrorhandler,
                                 languagecode=self.language.code)

    @property
    def non_db_state(self):
        if not hasattr(self, "_non_db_state"):
            try:
                self._non_db_state = self._non_db_state_cache[self.id]
            except KeyError:
                self._non_db_state = TranslationProjectNonDBState(self)
                self._non_db_state_cache[self.id] = \
                    TranslationProjectNonDBState(self)

        return self._non_db_state

    @property
    def disabled(self):
        return self.project.disabled

    @property
    def is_terminology_project(self):
        return self.project.checkstyle == 'terminology'

    @property
    def is_template_project(self):
        return self == self.project.get_template_translationproject()

    # # # # # # # # # # # # # #  Methods # # # # # # # # # # # # # # # # # # #

    def __unicode__(self):
        return self.pootle_path

    def __init__(self, *args, **kwargs):
        super(TranslationProject, self).__init__(*args, **kwargs)

    def save(self, *args, **kwargs):
        self.directory = self.language.directory \
                                      .get_or_make_subdir(self.project.code)
        self.pootle_path = self.directory.pootle_path

        if self.project.treestyle != 'pootle_fs':
            from pootle_app.project_tree import get_translation_project_dir
            self.abs_real_path = get_translation_project_dir(
                self.language,
                self.project,
                self.file_style,
                make_dirs=not self.directory.obsolete)
        else:
            self.abs_real_path = None
        super(TranslationProject, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        directory = self.directory

        super(TranslationProject, self).delete(*args, **kwargs)
        directory.delete()

    def get_absolute_url(self):
        return reverse('pootle-tp-browse',
                       args=split_pootle_path(self.pootle_path)[:-1])

    def get_translate_url(self, **kwargs):
        return u''.join([
            reverse("pootle-tp-translate",
                    args=split_pootle_path(self.pootle_path)[:-1]),
            get_editor_filter(**kwargs)
        ])

    def get_announcement(self, user=None):
        """Return the related announcement, if any."""
        return StaticPage.get_announcement_for(self.pootle_path, user)

    def filtererrorhandler(self, functionname, str1, str2, e):
        logging.error(u"Error in filter %s: %r, %r, %s", functionname, str1,
                      str2, e)
        return False

    def is_accessible_by(self, user):
        """Returns `True` if the current translation project is accessible
        by `user`.
        """
        if user.is_superuser:
            return True

        return self.project.code in Project.accessible_by_user(user)

    def can_be_inited_from_templates(self):
        """Returns `True` if the current translation project hasn't been
        saved yet and can be initialized from templates.
        """

        # This method checks if the current translation project directory
        # doesn't exist. So it won't work if the translation project is already
        # saved in the database because the translation project directory is
        # auto-created in `save()` method.
        template_tp = self.project.get_template_translationproject()
        return (not self.is_template_project and template_tp is not None
                and not translation_project_dir_exists(self.language,
                                                       self.project))

    def init_from_templates(self):
        """Initializes the current translation project files using
        the templates TP ones.
        """

        template_tp = self.project.get_template_translationproject()
        template_stores = template_tp.stores.live().exclude(file="")

        for template_store in template_stores.iterator():
            init_store_from_template(self, template_store)

        self.update_from_disk()

    def update_from_disk(self, force=False, overwrite=False):
        """Update all stores to reflect state on disk."""
        changed = False

        logging.info(u"Scanning for new files in %s", self)
        # Create new, make obsolete in-DB stores to reflect state on disk
        self.scan_files()

        stores = self.stores.live().select_related('parent').exclude(file='')
        # Update store content from disk store
        for store in stores.iterator():
            if not store.file:
                continue
            disk_mtime = store.get_file_mtime()
            if not force and disk_mtime == store.file_mtime:
                # The file on disk wasn't changed since the last sync
                logging.debug(
                    u"File didn't change since last sync, "
                    u"skipping %s", store.pootle_path)
                continue

            changed = (store.updater.update_from_disk(overwrite=overwrite)
                       or changed)

        return changed

    def sync(self, conservative=True, skip_missing=False, only_newer=True):
        """Sync unsaved work on all stores to disk"""
        stores = self.stores.live().exclude(file='').filter(state__gte=PARSED)
        for store in stores.select_related("parent").iterator():
            store.sync(update_structure=not conservative,
                       conservative=conservative,
                       skip_missing=skip_missing,
                       only_newer=only_newer)

    # # # TreeItem
    def get_children(self):
        return self.directory.children

    def get_parents(self):
        return [self.project]

    # # # /TreeItem

    def directory_exists_on_disk(self):
        """Checks if the actual directory for the translation project
        exists on disk.
        """
        return not does_not_exist(self.abs_real_path)

    def scan_files(self):
        """Scans the file system and returns a list of translation files.
        """
        projects = [p.strip() for p in self.project.ignoredfiles.split(',')]
        ignored_files = set(projects)

        filetypes = self.project.filetype_tool
        exts = filetypes.filetype_extensions

        # Scan for pots if template project
        if self.is_template_project:
            exts = filetypes.template_extensions

        from pootle_app.project_tree import (add_files,
                                             match_template_filename,
                                             direct_language_match_filename)

        all_files = []
        new_files = []

        if self.file_style == 'gnu':
            if self.pootle_path.startswith('/templates/'):
                file_filter = lambda filename: match_template_filename(
                    self.project,
                    filename,
                )
            else:
                file_filter = lambda filename: direct_language_match_filename(
                    self.language.code,
                    filename,
                )
        else:
            file_filter = lambda filename: True

        all_files, new_files, __ = add_files(
            self,
            ignored_files,
            exts,
            self.real_path,
            self.directory,
            file_filter,
        )

        return all_files, new_files

    ###########################################################################

    def gettermmatcher(self):
        """Returns the terminology matcher."""
        terminology_stores = Store.objects.none()
        mtime = None

        if not self.is_terminology_project:
            # Get global terminology first
            try:
                termproject = TranslationProject.objects \
                    .get_terminology_project(self.language_id)
                mtime = termproject.data.max_unit_mtime
                terminology_stores = termproject.stores.live()
            except TranslationProject.DoesNotExist:
                pass

            local_terminology = self.stores.live().filter(
                name__startswith='pootle-terminology')
            for store in local_terminology.iterator():
                if mtime is None:
                    mtime = store.data.max_unit_mtime
                else:
                    mtime = max(mtime, store.data.max_unit_mtime)

            terminology_stores = terminology_stores | local_terminology

        if mtime is None:
            return

        if mtime != self.non_db_state.termmatchermtime:
            from pootle_misc.match import Matcher
            self.non_db_state.termmatcher = Matcher(
                terminology_stores.iterator())
            self.non_db_state.termmatchermtime = mtime

        return self.non_db_state.termmatcher
Example No. 16
class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('core', '0027_donation'),
    ]

    operations = [
        migrations.CreateModel(
            name='BeatTheBullshitItem',
            fields=[
                ('participationitem_ptr',
                 models.OneToOneField(
                     auto_created=True,
                     on_delete=django.db.models.deletion.CASCADE,
                     parent_link=True,
                     primary_key=True,
                     serialize=False,
                     to='core.ParticipationItem')),
            ],
            bases=('core.participationitem', ),
        ),
        migrations.CreateModel(
            name='BeatTheBullshitProject',
            fields=[
                ('participationproject_ptr',
                 models.OneToOneField(
                     auto_created=True,
                     on_delete=django.db.models.deletion.CASCADE,
                     parent_link=True,
                     primary_key=True,
                     serialize=False,
                     to='core.ParticipationProject')),
                ('topic_overview', models.TextField()),
            ],
            bases=('core.participationproject', ),
        ),
        migrations.CreateModel(
            name='Fallacy',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=500)),
                ('description', models.TextField()),
                ('example_context', models.TextField()),
                ('example', models.TextField()),
                ('improvement', models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name='Quote',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('quote_string', models.TextField()),
                ('speaker_name', models.CharField(max_length=500)),
                ('reference', models.URLField()),
                ('screenshot_filename',
                 models.FilePathField(blank=True, max_length=500)),
                ('project',
                 models.ForeignKey(
                     on_delete=django.db.models.deletion.CASCADE,
                     to='beat_the_bullshit.BeatTheBullshitProject')),
            ],
        ),
        migrations.CreateModel(
            name='QuoteFallacyAssociation',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('explanation', models.TextField()),
                ('improvement', models.TextField()),
                ('fallacy',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to='beat_the_bullshit.Fallacy')),
                ('quote',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to='beat_the_bullshit.Quote')),
            ],
        ),
    ]
Example No. 17
class Project(models.Model):
    title = models.CharField(max_length=100)
    description = models.TextField()
    technology = models.CharField(max_length=20)
    image = models.FilePathField(path="/img")
Example No. 18
class Migration(migrations.Migration):

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name='Book',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('image', models.FilePathField()),
            ],
        ),
        migrations.CreateModel(
            name='Chapter',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('book',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to='api.Book')),
            ],
        ),
        migrations.CreateModel(
            name='SubChapter',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('content', models.TextField()),
                ('chapter',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to='api.Chapter')),
            ],
        ),
        migrations.CreateModel(
            name='BookImage',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('path', models.FilePathField()),
                ('book',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to='api.Book')),
            ],
        ),
        migrations.AddField(
            model_name='book',
            name='categories',
            field=models.ManyToManyField(to='api.Category'),
        ),
    ]
Example No. 19
class Feed(models.Model):
    STIP_SNS_USER_NAME_PREFIX = const.STIP_SNS_USER_NAME_KEY + ': '
    STIP_SNS_SCREEN_NAME_PREFIX = const.STIP_SNS_SCREEN_NAME_KEY + ': '
    STIP_SNS_AFFILIATION_PREFIX = const.STIP_SNS_AFFILIATION_KEY + ': '
    STIP_SNS_REGION_CODE_PREFIX = const.STIP_SNS_REGION_CODE_KEY + ': '
    STIP_SNS_CI_PREFIX = const.STIP_SNS_CI_KEY + ': '
    STIP_SNS_REFERRED_URL_PREFIX = const.STIP_SNS_REFERRED_URL_KEY + ': '
    STIP_SNS_STIX2_PACKAGE_ID_PREFIX = const.STIP_SNS_STIX2_PACKAGE_ID_KEY + ': '

    package_id = models.CharField(max_length=128, default='', primary_key=True)
    user = models.ForeignKey(STIPUser)
    date = models.DateTimeField(null=True)
    post = models.TextField(max_length=1024)
    likes = models.IntegerField(default=0)
    comments = models.IntegerField(default=0)
    files = models.ManyToManyField(AttachFile)
    title = models.TextField(max_length=1024, default=None, null=True)
    stix_file_path = models.FilePathField(max_length=1024, default=None, null=True)
    tlp = models.CharField(max_length=10, choices=const.TLP_CHOICES, default='AMBER')
    sharing_range_type = models.CharField(max_length=10, choices=const.SHARING_RANGE_CHOICES, default=const.SHARING_RANGE_TYPE_KEY_ALL)
    sharing_people = models.ManyToManyField(STIPUser, related_name='feed_sharing_people')
    sharing_group = models.ForeignKey(Group, default=None, null=True)
    filename_pk = models.CharField(max_length=128, default='undefined')
    screen_name = models.CharField(max_length=128, default='')
    screen_affiliation = models.CharField(max_length=50, default='')
    screen_instance = models.CharField(max_length=128, default='', null=True)
    is_valid_user = models.BooleanField(default='True')
    region_code = models.CharField(max_length=128, default='')
    country_code = models.TextField(max_length=8, default=None, null=True)
    administrative_code = models.TextField(max_length=8, default=None, null=True)
    ci = models.CharField(max_length=128, default='', null=True)
    referred_url = models.TextField(max_length=1024, default=None, null=True)
    stix2_package_id = models.CharField(max_length=128, default='', null=True)
    tmp_sharing_people = []
    build_cache_flag = False

    class Meta:
        verbose_name = _('Feed')
        verbose_name_plural = _('Feeds')
        ordering = ('-date',)
        db_table = 'stip_sns_feed'

    def linkfy_post(self):
        if isinstance(self.post, StructuredText):
            v = self.post.value
        else:
            v = self.post
        return bleach.linkify(v)

    def __str__(self):
        return self.post

    @staticmethod
    # Load at startup and build the cache
    def build_cache(api_user):
        packages_from_rs = rs.get_feeds_from_rs(
            api_user,
            index=0,
            size=-1)
        for package_from_rs in packages_from_rs:
            # Build a Feed from the JSON returned by RS
            Feed.get_feeds_from_package_from_rs(api_user, package_from_rs)

    @staticmethod
    # Return a queryset filtered according to sharing_range
    def get_filter_query_set(feeds, request_user, feeds_=None):
        if feeds_ is None:
            return []

        l = []
        for feed_ in feeds_:
            # Add the user's own posts to the list
            if request_user is not None:
                if request_user == feed_.user:
                    l.append(feed_)
                    continue
            # Add if sharing_range_type is "all"
            if feed_.sharing_range_type == const.SHARING_RANGE_TYPE_KEY_ALL:
                l.append(feed_)
                continue
            elif feed_.sharing_range_type == const.SHARING_RANGE_TYPE_KEY_GROUP:
                # Add if the logged-in user is a member of sharing_group
                if request_user is not None:
                    # Filter by STIPUser, not by the Profile's user
                    if len(feed_.sharing_group.members.filter(username=request_user)) == 1:
                        l.append(feed_)
                        continue
            elif feed_.sharing_range_type == const.SHARING_RANGE_TYPE_KEY_PEOPLE:
                # Add if the logged-in user is included in sharing_people
                if request_user is not None:
                    if request_user in feed_.sharing_people.all():
                        l.append(feed_)
                        continue
        return l

    @staticmethod
    # Fetch the STIX from RS for the given package_id and build a Feed
    def get_feeds_from_package_id(api_user, package_id):
        package_from_rs = rs.get_package_info_from_package_id(api_user, package_id)
        return Feed.get_feeds_from_package_from_rs(api_user, package_from_rs)

    @staticmethod
    # Was the stix_package produced by the S-TIP SNS?
    def is_stip_sns_stix_package(stix_package):
        try:
            for tool in stix_package.stix_header.information_source.tools:
                if (tool.name == const.SNS_TOOL_NAME) and (tool.vendor == const.SNS_TOOL_VENDOR):
                    return True
            return False
        except BaseException:
            return False

    @staticmethod
    def get_attach_stix_dir_path(stix_id):
        # Stored as ATTACH_FILE_DIR/{{attach_file_id}}/name
        dir_name = rs.convert_package_id_to_filename(stix_id)
        return const.ATTACH_FILE_DIR + dir_name

    @staticmethod
    def get_attach_file_name(stix_id):
        af = AttachFile.objects.get(package_id=stix_id)
        return af.file_name

    @staticmethod
    def get_attach_file_path(stix_id):
        attachment_stix_dir = Feed.get_attach_stix_dir_path(stix_id)
        file_name = Feed.get_attach_file_name(stix_id)
        return attachment_stix_dir + os.sep + file_name

    @staticmethod
    def get_cached_file_path(feed_file_name_id):
        return const.STIX_CACHE_DIR + os.sep + feed_file_name_id

    @staticmethod
    # Return None if stix_id has no SNS-created attachment information
    def get_attach_file(api_user, stix_id):
        attachment_stix_dir = Feed.get_attach_stix_dir_path(stix_id)
        if os.path.exists(attachment_stix_dir):
            # Return the file name and file path from here
            try:
                # One file per directory, so the first entry is the file name
                attach_file = AttachFile()
                attach_file.file_name = Feed.get_attach_file_name(stix_id)
                attach_file.package_id = stix_id
                attach_file.save()
                return attach_file
            except IndexError:
                # Raised when the directory exists but the file does not;
                # processing continues
                pass
        else:
            # The directory does not exist, so create it
            os.mkdir(attachment_stix_dir)

        attachement_cached_stix_file_path = rs.get_stix_file_path(api_user, stix_id)
        try:
            # Parse the attachment STIX package
            attachement_stix_package = STIXPackage.from_xml(attachement_cached_stix_file_path)
            # Get file_name and content from the markings
            file_name = None
            content = None
            try:
                markings = attachement_stix_package.stix_header.handling.marking
            except AttributeError:
                return None

            for marking in markings:
                marking_structure = marking.marking_structures[0]
                if isinstance(marking_structure, SimpleMarkingStructure):
                    statement = marking_structure.statement
                    if statement.startswith(const.MARKING_STRUCTURE_STIP_ATTACHEMENT_FILENAME_PREFIX):
                        file_name = statement[len(const.MARKING_STRUCTURE_STIP_ATTACHEMENT_FILENAME_PREFIX + ': '):]
                    elif statement.startswith(const.MARKING_STRUCTURE_STIP_ATTACHEMENT_CONTENT_PREFIX):
                        content_str = statement[len(const.MARKING_STRUCTURE_STIP_ATTACHEMENT_CONTENT_PREFIX + ': '):]
                        # content is base64-decoded
                        content = base64.b64decode(content_str)
            if (file_name is None) or (content is None):
                return None
        except Exception as e:
            import traceback
            traceback.print_exc()
            raise e

        # Save the file
        file_path = attachment_stix_dir + os.sep + file_name
        file_path = file_path.encode('utf-8')
        with open(file_path, 'wb') as fp:
            fp.write(content)
        attach_file = AttachFile()
        attach_file.file_name = file_name
        attach_file.package_id = stix_id
        attach_file.save()
        return attach_file

    @staticmethod
    # Parse a datetime from a string
    def get_datetime_from_string(s):
        try:
            return datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S.%f').replace(tzinfo=pytz.utc)
        except ValueError:
            return datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S').replace(tzinfo=pytz.utc)

    class StipInformationFromStix:
        def __init__(self):
            self.user_name = None
            self.screen_name = None
            self.affiliation = None
            self.instance = None
            self.is_sns = False
            self.region_code = None
            self.ci = None
            self.referred_url = None
            self.country_code = None
            self.administrative_code = None
            self.stix2_package_id = None

    # Get the user_name from the stix_package
    @staticmethod
    def get_stip_from_stix_package(stix_package):
        bean = Feed.StipInformationFromStix()
        bean.is_sns = Feed.is_stip_sns_stix_package(stix_package)
        try:
            for marking in stix_package.stix_header.handling:
                if isinstance(marking, MarkingSpecification):
                    marking_structure = marking.marking_structures[0]
                    if isinstance(marking_structure, SimpleMarkingStructure):
                        statement = marking_structure.statement
                        if statement.startswith(Feed.STIP_SNS_USER_NAME_PREFIX):
                            bean.user_name = statement[len(Feed.STIP_SNS_USER_NAME_PREFIX):]
                        if statement.startswith(Feed.STIP_SNS_SCREEN_NAME_PREFIX):
                            bean.screen_name = statement[len(Feed.STIP_SNS_SCREEN_NAME_PREFIX):]
                        if statement.startswith(Feed.STIP_SNS_AFFILIATION_PREFIX):
                            bean.affiliation = statement[len(Feed.STIP_SNS_AFFILIATION_PREFIX):]
                        if statement.startswith(Feed.STIP_SNS_REGION_CODE_PREFIX):
                            bean.region_code = statement[len(Feed.STIP_SNS_REGION_CODE_PREFIX):]
                        if statement.startswith(Feed.STIP_SNS_CI_PREFIX):
                            bean.ci = statement[len(Feed.STIP_SNS_CI_PREFIX):]
                        if statement.startswith(Feed.STIP_SNS_REFERRED_URL_PREFIX):
                            bean.referred_url = statement[len(Feed.STIP_SNS_REFERRED_URL_PREFIX):]
                        if statement.startswith(Feed.STIP_SNS_STIX2_PACKAGE_ID_PREFIX):
                            bean.stix2_package_id = statement[len(Feed.STIP_SNS_STIX2_PACKAGE_ID_PREFIX):]
                    # Get country_code and administrative_area from the AISMarkingStructure
                    elif isinstance(marking_structure, AISMarkingStructure):
                        information_source = marking.information_source
                        identity = information_source.identity
                        specification = identity.specification
                        addresses = specification.addresses
                        address = addresses[0]
                        # country_code
                        country = address.country
                        name_elements = country.name_elements
                        name_element = name_elements[0]
                        country_code = name_element.name_code
                        bean.country_code = country_code
                        # administrative_area
                        administrative_area = address.administrative_area
                        name_elements = administrative_area.name_elements
                        name_element = name_elements[0]
                        administrative_code = name_element.name_code
                        bean.administrative_code = administrative_code

        except BaseException:
            pass
        try:
            bean.instance = stix_package.stix_header.information_source.identity.name
        except BaseException:
            pass
        return bean

    @staticmethod
    def set_screen_value_from_local_db(feed, bean):
        # Use the same overwrite logic as for STIX
        Feed.set_screen_value_from_stix(feed, bean)

    @staticmethod
    def set_screen_value_from_stix(feed, bean):
        stip_user = feed.user
        # Prefer the values from the STIX file; fall back to the DB-stored values if absent
        if bean.screen_name is not None and len(bean.screen_name) != 0:
            feed.screen_name = bean.screen_name
        else:
            feed.screen_name = stip_user.screen_name

        if bean.affiliation is not None and len(bean.affiliation) != 0:
            feed.screen_affiliation = bean.affiliation
        else:
            feed.screen_affiliation = stip_user.affiliation if stip_user.affiliation is not None else ''

        if bean.instance is not None and len(bean.instance) != 0:
            feed.screen_instance = bean.instance
        else:
            feed.screen_instance = ''

        if bean.country_code is not None and len(bean.country_code) != 0:
            feed.country_code = bean.country_code
        else:
            feed.country_code = stip_user.country_code

        if bean.administrative_code is not None and len(bean.administrative_code) != 0:
            feed.administrative_code = bean.administrative_code
        else:
            feed.administrative_code = stip_user.administrative_code
        return

    # Get the N/A account
    @staticmethod
    def get_na_account():
        return STIPUser.objects.get(username=const.SNS_NA_ACCOUNT)

    # Create a Feed cache record
    @staticmethod
    def create_feeds_record(api_user, package_id, uploader_id, produced_str):
        # Get the stix_package from the STIX file retrieved from RS
        stix_file_path = rs.get_stix_file_path(api_user, package_id)
        stix_package = STIXPackage.from_xml(stix_file_path, encoding='utf-8')

        # Build the Feed information from the STIX file and the RS API
        feed = Feed()
        feed.package_id = package_id
        feed.filename_pk = rs.convert_package_id_to_filename(package_id)

        # Get the display information from the STIX file
        bean = Feed.get_stip_from_stix_package(stix_package)
        if bean.is_sns:
            # STIX produced by the S-TIP SNS
            if bean.instance == SNSConfig.get_sns_identity_name():
                # Same as the currently running instance
                try:
                    # The username defined in the STIX exists
                    feed.user = STIPUser.objects.get(username=bean.user_name)
                    # Take display values from the local DB
                    Feed.set_screen_value_from_local_db(feed, bean)
                except BaseException:
                    # The username defined in the STIX does not exist -> N/A account
                    feed.user = Feed.get_na_account()
                    # Take display values from the STIX file
                    Feed.set_screen_value_from_stix(feed, bean)
                    # The user has already been deleted
                    feed.is_valid_user = False
            else:
                # Different from the currently running instance
                try:
                    # Account with the same name as the instance
                    feed.user = STIPUser.objects.get(username=bean.instance)
                    # Take display values from the STIX file
                    Feed.set_screen_value_from_stix(feed, bean)
                except BaseException:
                    # No account with the same name as the instance -> N/A account
                    feed.user = Feed.get_na_account()
                    # Take display values from the STIX file
                    Feed.set_screen_value_from_stix(feed, bean)
        else:
            # Not STIX produced by the SNS
            if bean.instance is not None:
                # An instance name is present
                instance_user_name = bean.instance.replace(' ', '')
                try:
                    # Account with the same name as the instance
                    feed.user = STIPUser.objects.get(username=instance_user_name)
                    # Take display values from the local DB
                    Feed.set_screen_value_from_local_db(feed, bean)
                except BaseException:
                    # No account with the same name as the instance -> N/A account
                    feed.user = Feed.get_na_account()
                    # Display bean.instance as both screen name and instance
                    feed.screen_name = bean.instance
                    feed.screen_instance = bean.instance
            else:
                # No instance name -> N/A account
                feed.user = Feed.get_na_account()
                # Take display values from the local DB (N/A account)
                Feed.set_screen_value_from_local_db(feed, bean)

        feed.date = Feed.get_datetime_from_string(produced_str)
        feed.post = stix_package.stix_header.description
        if feed.post is None:
            feed.post = ''

        # Get attachment file information
        if Feed.is_stip_sns_stix_package(stix_package):
            # STIX created by the S-TIP SNS
            if stix_package.related_packages is not None:
                # save() the feed once so attach files can be added
                feed.save()
                # related_packages may also contain packages other than attachment STIX
                for related_package in stix_package.related_packages:
                    # Attachments go into the attach dir, named after the attachment STIX id
                    attach_file = Feed.get_attach_file(api_user, related_package.item.id_)
                    # attach_file is None when the related package is not an attach file
                    if attach_file is None:
                        continue
                    feed.files.add(attach_file)
                feed.save()

        feed.title = stix_package.stix_header.title
        feed.stix_file_path = stix_file_path
        try:
            uploader_stipuser = STIPUser.objects.get(id=uploader_id)
            feed.tlp = Feed.get_ais_tlp_from_stix_header(stix_package.stix_header, uploader_stipuser.tlp)
            if feed.tlp is None:
                # If no TLP could be obtained, use the default TLP, AMBER
                feed.tlp = 'AMBER'
        except BaseException:
            # If the uploader profile does not exist, use the default TLP, AMBER
            feed.tlp = 'AMBER'
        sharing_range_info = Feed.get_sharing_range_from_stix_header(stix_package.stix_header)
        if isinstance(sharing_range_info, list):
            # sharing_range_info is a list of STIPUser
            feed.sharing_range_type = const.SHARING_RANGE_TYPE_KEY_PEOPLE
            feed.save()
            for stip_user in sharing_range_info:
                feed.sharing_people.add(stip_user)
        elif isinstance(sharing_range_info, Group):
            feed.sharing_range_type = const.SHARING_RANGE_TYPE_KEY_GROUP
            feed.sharing_group = sharing_range_info
        else:
            feed.sharing_range_type = const.SHARING_RANGE_TYPE_KEY_ALL
        # feed.package_id = package_id
        if bean.region_code is not None:
            feed.region_code = bean.region_code
        else:
            if feed.user.region is not None:
                feed.region_code = feed.user.region.code
            else:
                feed.region_code = ''
        if bean.ci is not None:
            feed.ci = bean.ci
        else:
            feed.ci = feed.user.ci
        if bean.referred_url is not None:
            feed.referred_url = bean.referred_url
        if bean.stix2_package_id is not None:
            feed.stix2_package_id = bean.stix2_package_id
        feed.save()
        return feed

    # Build a Feed from the JSON returned by the RS API
    @staticmethod
    def get_feeds_from_package_from_rs(api_user, package_from_rs):
        package_id = package_from_rs['package_id']
        uploader_id = package_from_rs['uploader']
        produced_str = package_from_rs['produced']

        try:
            # Use the cached record if one exists
            feed = Feed.objects.get(package_id=package_id)
            # Check whether the STIX instance matches the currently running instance
            if feed.screen_instance is not None:
                if feed.screen_instance == SNSConfig.get_sns_identity_name():
                    # Use feed.user's current affiliation/screen_name/ci/region_code
                    feed.screen_name = feed.user.screen_name
                    feed.screen_affiliation = feed.user.affiliation
                    feed.ci = feed.user.ci
                    if feed.user.region is not None:
                        feed.region_code = feed.user.region.code
        except Feed.DoesNotExist:
            # Not cached yet; create the Feed cache record
            feed = Feed.create_feeds_record(api_user, package_id, uploader_id, produced_str)
        except Exception as e:
            import traceback
            traceback.print_exc()
            raise e
        return feed

    @staticmethod
    def add_like_comment_info(api_user, feed):
        # Likes and comments are fetched from RS on every call so they stay current
        # Get the number of likes from RS
        likers = rs.get_likers_from_rs(api_user, feed.package_id)
        feed.likes = len(likers)

        # Get the like status
        mylike = '%s %s' % (SNSConfig.get_sns_identity_name(), api_user)
        feed.like = mylike in likers

        # Get the number of comments from RS
        feed.comments = len(rs.get_comment_from_rs(api_user, feed.package_id))
        # feed.save()
        return feed

    # Run a query against RS
    @staticmethod
    def query(
            api_user=None,
            query_string=''):
        feeds_ = []
        # Query RS
        packages_from_rs = rs.query(api_user, query_string)
        for package_from_rs in packages_from_rs:
            feed = Feed.get_feeds_from_package_from_rs(api_user, package_from_rs)
            feeds_.append(feed)
        return feeds_

    @staticmethod
    def get_feeds(
            api_user=None,
            last_reload=None,  # when given, treat this time as the newest and look for older posts (exclusive)
            last_feed_datetime=None,  # when given, look for posts newer than this time (exclusive)
            range_small_datetime=None,  # lower (older) bound of the date range (inclusive)
            range_big_datetime=None,  # upper (newer) bound of the date range (inclusive)
            index=0,
            size=-1,
            user_id=None):

        # Build the Feed cache (only on the first call after startup)
        if not Feed.build_cache_flag:
            Feed.build_cache(api_user)
            Feed.build_cache_flag = True

        # Fetch from RS
        if last_feed_datetime is None:
            # Treat start_time (last_reload) as the newest and fetch older posts
            packages_from_rs = rs.get_feeds_from_rs(
                api_user,
                start_time=last_reload,
                user_id=user_id,
                range_small_datetime=range_small_datetime,
                range_big_datetime=range_big_datetime,
                index=index,
                size=size)
        else:
            # Fetch posts newer than last_feed_datetime
            packages_from_rs = rs.get_feeds_from_rs(
                api_user,
                last_feed_datetime=last_feed_datetime,
                user_id=user_id,
                range_small_datetime=range_small_datetime,
                range_big_datetime=range_big_datetime,
                index=index,
                size=size)

        feeds_ = []
        for package_from_rs in packages_from_rs:
            # Build a Feed from the JSON returned by RS
            feed = Feed.get_feeds_from_package_from_rs(api_user, package_from_rs)
            # Add the feed to the result list
            feeds_.append(feed)
        return Feed.get_filter_query_set(None, api_user, feeds_=feeds_)

    # Return the AIS TLP from the stix_header; return default_tlp if none is found
    @staticmethod
    def get_ais_tlp_from_stix_header(stix_header, default_tlp='AMBER'):
        try:
            for marking in stix_header.handling.marking:
                marking_structure = marking.marking_structures[0]
                if isinstance(marking_structure, AISMarkingStructure):
                    return marking_structure.not_proprietary.tlp_marking.color
        except BaseException:
            pass
        return default_tlp

    # Return the sharing range from the stix_header:
    # a Group instance when a group is specified,
    # a list of STIPUser when people are specified,
    # otherwise None
    @staticmethod
    def get_sharing_range_from_stix_header(stix_header):
        SHARING_RANGE_PREFIX = 'Sharing Range:'
        SHARING_RANGE_ALL_VALUE = 'Sharing Range: CIC Community'
        SHARING_RANGE_GROUP_PREFIX = 'Sharing Range: Group: '
        SHARING_RANGE_PEOPLE_PREFIX = 'Sharing Range: People: '
        try:
            for marking in stix_header.handling.marking:
                marking_structure = marking.marking_structures[0]
                if isinstance(marking_structure, SimpleMarkingStructure):
                    statement = marking_structure.statement
                    if not statement.startswith(SHARING_RANGE_PREFIX):
                        continue
                    if statement == SHARING_RANGE_ALL_VALUE:
                        # Sharing Range is ALL
                        return None
                    if statement.startswith(SHARING_RANGE_GROUP_PREFIX):
                        # Sharing Range specifies a Group
                        group_name = statement[len(SHARING_RANGE_GROUP_PREFIX):]
                        group = Group.objects.get(en_name=group_name)
                        return group
                    if statement.startswith(SHARING_RANGE_PEOPLE_PREFIX):
                        # Sharing Range specifies People
                        people_str = statement[len(SHARING_RANGE_PEOPLE_PREFIX):]
                        people_list = []
                        for p_str in people_str.split(','):
                            people = STIPUser.objects.get(username=p_str.strip())
                            people_list.append(people)
                        return people_list

        except BaseException:
            pass
        # No sharing range specified; return None
        return None
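
    # Illustrative sketch (statement forms assumed from the prefixes above):
    # the marking statements this parser recognises look like
    #   'Sharing Range: CIC Community'           -> None (shared with everyone)
    #   'Sharing Range: Group: <group en_name>'  -> Group instance
    #   'Sharing Range: People: alice, bob'      -> [STIPUser, STIPUser]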

    @staticmethod
    def get_feeds_after(last_feed_datetime, api_user=None, user_id=None):
        feeds_ = Feed.get_feeds(last_feed_datetime=last_feed_datetime, api_user=api_user, user_id=user_id)
        return Feed.get_filter_query_set(None, api_user, feeds_=feeds_)
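
A minimal usage sketch for the Feed class above (illustrative only; the view-style
function and its arguments are placeholders, not part of the original code). It
chains the cache-aware fetch with the per-feed like/comment refresh:

    def render_timeline(api_user, last_reload=None):
        # Fetch feeds from RS (cached Feed records are reused when present)
        feeds = Feed.get_feeds(api_user=api_user, last_reload=last_reload, size=20)
        # Likes and comments are refreshed on every request
        return [Feed.add_like_comment_info(api_user, feed) for feed in feeds]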
Exemplo n.º 20
class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Association',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('d2d',
                 models.FloatField(
                     default=0.0,
                     help_text=
                     'astronomical distance calculated by Astropy, arcsec.')),
                ('dr',
                 models.FloatField(
                     default=0.0,
                     help_text=
                     'De Ruiter radius calculated in advanced association.')),
            ],
        ),
        migrations.CreateModel(
            name='Band',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=12, unique=True)),
                ('frequency',
                 models.FloatField(
                     help_text='central frequency of band (integer MHz)')),
                ('bandwidth', models.FloatField(help_text='bandwidth (MHz)')),
            ],
            options={
                'ordering': ['frequency'],
            },
        ),
        migrations.CreateModel(
            name='CrossMatch',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('manual', models.BooleanField()),
                ('distance', models.FloatField()),
                ('probability', models.FloatField()),
                ('comment',
                 models.TextField(blank=True, default='', max_length=1000)),
            ],
        ),
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('measurements_path',
                 models.FilePathField(
                     db_column='meas_path',
                     help_text=
                     'the path to the measurements parquet that belongs to this image',
                     max_length=200)),
                ('polarisation',
                 models.CharField(
                     choices=[('I', 'I'),
                              ('XX', 'XX'), ('YY', 'YY'), ('Q', 'Q'),
                              ('U', 'U'), ('V', 'V')],
                     help_text=
                     'Polarisation of the image, one of I,XX,YY,Q,U,V.',
                     max_length=2)),
                ('name',
                 models.CharField(help_text='Name of the image.',
                                  max_length=200,
                                  unique=True)),
                ('path',
                 models.FilePathField(
                     help_text='Path to the file containing the image.',
                     max_length=500)),
                ('noise_path',
                 models.FilePathField(
                     blank=True,
                     default='',
                     help_text='Path to the file containing the RMS image.',
                     max_length=300)),
                ('background_path',
                 models.FilePathField(
                     blank=True,
                     default='',
                     help_text=
                     'Path to the file containing the background image.',
                     max_length=300)),
                ('datetime',
                 models.DateTimeField(
                     help_text='Date/time of observation or epoch.')),
                ('jd',
                 models.FloatField(
                     help_text='Julian date of the observation (days).')),
                ('duration',
                 models.FloatField(default=0.0,
                                   help_text='Duration of the observation.')),
                ('ra',
                 models.FloatField(help_text='RA of the image centre (Deg).')),
                ('dec',
                 models.FloatField(
                     help_text='DEC of the image centre (Deg).')),
                ('fov_bmaj',
                 models.FloatField(
                     help_text='Field of view major axis (Deg).')),
                ('fov_bmin',
                 models.FloatField(
                     help_text='Field of view minor axis (Deg).')),
                ('physical_bmaj',
                 models.FloatField(
                     help_text='The actual size of the image major axis (Deg).'
                 )),
                ('physical_bmin',
                 models.FloatField(
                     help_text='The actual size of the image minor axis (Deg).'
                 )),
                ('radius_pixels',
                 models.FloatField(
                     help_text=
                     'Radius of the useable region of the image (pixels).')),
                ('beam_bmaj',
                 models.FloatField(
                     help_text='Major axis of image restoring beam (Deg).')),
                ('beam_bmin',
                 models.FloatField(
                     help_text='Minor axis of image restoring beam (Deg).')),
                ('beam_bpa',
                 models.FloatField(help_text='Beam position angle (Deg).')),
                ('rms_median',
                 models.FloatField(
                     help_text=
                     'Background average RMS from the provided RMS map (mJy).')
                 ),
                ('rms_min',
                 models.FloatField(
                     help_text=
                     'Background minimum RMS from the provided RMS map (mJy).')
                 ),
                ('rms_max',
                 models.FloatField(
                     help_text=
                     'Background maximum RMS from the provided RMS map (mJy).')
                 ),
                ('band',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to='vast_pipeline.Band')),
            ],
            options={
                'ordering': ['datetime'],
            },
        ),
        migrations.CreateModel(
            name='Measurement',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=64, unique=True)),
                ('ra', models.FloatField(help_text='RA of the source (Deg).')),
                ('ra_err',
                 models.FloatField(help_text='RA error of the source (Deg).')),
                ('dec',
                 models.FloatField(help_text='DEC of the source (Deg).')),
                ('dec_err',
                 models.FloatField(
                     help_text='DEC error of the source (Deg).')),
                ('bmaj',
                 models.FloatField(
                     help_text=
                     'The major axis of the Gaussian fit to the source (Deg).')
                 ),
                ('err_bmaj',
                 models.FloatField(help_text='Error major axis (Deg).')),
                ('bmin',
                 models.FloatField(
                     help_text=
                     'The minor axis of the Gaussian fit to the source (Deg).')
                 ),
                ('err_bmin',
                 models.FloatField(help_text='Error minor axis (Deg).')),
                ('pa',
                 models.FloatField(
                     help_text=
                     'Position angle of Gaussian fit east of north to bmaj (Deg).'
                 )),
                ('err_pa',
                 models.FloatField(help_text='Error position angle (Deg).')),
                ('ew_sys_err',
                 models.FloatField(
                     help_text=
                     'Systematic error in east-west (RA) direction (Deg).')),
                ('ns_sys_err',
                 models.FloatField(
                     help_text=
                     'Systematic error in north-south (dec) direction (Deg).')
                 ),
                ('error_radius',
                 models.FloatField(
                     help_text=
                     'Estimate of maximum error radius using ra_err and dec_err (Deg).'
                 )),
                ('uncertainty_ew',
                 models.FloatField(
                     help_text=
                     'Total east-west (RA) uncertainty, quadratic sum of error_radius and ew_sys_err (Deg).'
                 )),
                ('uncertainty_ns',
                 models.FloatField(
                     help_text=
                     'Total north-south (Dec) uncertainty, quadratic sum of error_radius and ns_sys_err (Deg).'
                 )),
                ('flux_int', models.FloatField()),
                ('flux_int_err', models.FloatField()),
                ('flux_int_isl_ratio',
                 models.FloatField(
                     help_text=
                     'Ratio of the component integrated flux to the total island integrated flux.'
                 )),
                ('flux_peak', models.FloatField()),
                ('flux_peak_err', models.FloatField()),
                ('flux_peak_isl_ratio',
                 models.FloatField(
                     help_text=
                     'Ratio of the component peak flux to the total island peak flux.'
                 )),
                ('chi_squared_fit',
                 models.FloatField(
                     db_column='chi2_fit',
                     help_text='Chi-squared of the Gaussian fit to the source.'
                 )),
                ('spectral_index',
                 models.FloatField(
                     db_column='spectr_idx',
                     help_text='In-band Selavy spectral index.')),
                ('spectral_index_from_TT',
                 models.BooleanField(
                     db_column='spectr_idx_tt',
                     default=False,
                     help_text=
                     'True/False if the spectral index came from the taylor term.'
                 )),
                ('local_rms',
                 models.FloatField(help_text='Local rms in mJy from Selavy.')),
                ('snr',
                 models.FloatField(
                     help_text='Signal-to-noise ratio of the measurement.')),
                ('flag_c4',
                 models.BooleanField(default=False,
                                     help_text='Fit flag from Selavy.')),
                ('compactness',
                 models.FloatField(help_text='Int flux over peak flux.')),
                ('has_siblings',
                 models.BooleanField(
                     default=False,
                     help_text=
                     'True if the fit comes from an island that has more than 1 component.'
                 )),
                ('component_id',
                 models.CharField(
                     help_text=
                     'The ID of the component from which the source comes.',
                     max_length=64)),
                ('island_id',
                 models.CharField(
                     help_text=
                     'The ID of the island from which the source comes.',
                     max_length=64)),
                ('forced',
                 models.BooleanField(
                     default=False,
                     help_text='True: the measurement is forced extracted.')),
                ('image',
                 models.ForeignKey(null=True,
                                   on_delete=django.db.models.deletion.CASCADE,
                                   to='vast_pipeline.Image')),
            ],
            options={
                'ordering': ['ra'],
            },
        ),
        migrations.CreateModel(
            name='RelatedSource',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='Run',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name',
                 models.CharField(
                     help_text='name of the pipeline run',
                     max_length=64,
                     unique=True,
                     validators=[
                         django.core.validators.RegexValidator(
                             inverse_match=True,
                             message='Name contains disallowed characters!',
                             regex='[\\[@!#$%^&*()<>?/\\|}{~:\\] ]')
                     ])),
                ('description',
                 models.CharField(
                     blank=True,
                     help_text='A short description of the pipeline run.',
                     max_length=240)),
                ('time',
                 models.DateTimeField(
                     auto_now=True, help_text='Datetime of a pipeline run.')),
                ('path',
                 models.FilePathField(help_text='path to the pipeline run',
                                      max_length=200)),
                ('status',
                 models.CharField(choices=[('INI', 'Initialised'),
                                           ('QUE', 'Queued'),
                                           ('RUN', 'Running'),
                                           ('END', 'Completed'),
                                           ('ERR', 'Error'),
                                           ('RES', 'Restoring')],
                                  default='INI',
                                  help_text='Status of the pipeline run.',
                                  max_length=3)),
                ('n_images',
                 models.IntegerField(
                     default=0,
                     help_text='number of images processed in this run')),
                ('n_sources',
                 models.IntegerField(
                     default=0,
                     help_text='number of sources extracted in this run')),
                ('n_selavy_measurements',
                 models.IntegerField(
                     default=0,
                     help_text='number of selavy measurements in this run')),
                ('n_forced_measurements',
                 models.IntegerField(
                     default=0,
                     help_text='number of forced measurements in this run')),
                ('epoch_based',
                 models.BooleanField(
                     default=False,
                     help_text=
                     'Whether the run was processed using epoch based association, i.e. the user passed in groups of images defining epochs rather than every image being treated individually.'
                 )),
                ('user',
                 models.ForeignKey(
                     blank=True,
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['name'],
            },
        ),
        migrations.CreateModel(
            name='Source',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('new',
                 models.BooleanField(default=False, help_text='New Source.')),
                ('wavg_ra',
                 models.FloatField(
                     help_text='The weighted average right ascension (Deg).')),
                ('wavg_dec',
                 models.FloatField(
                     help_text='The weighted average declination (Deg).')),
                ('wavg_uncertainty_ew',
                 models.FloatField(
                     help_text=
                     'The weighted average uncertainty in the east-west (RA) direction (Deg).'
                 )),
                ('wavg_uncertainty_ns',
                 models.FloatField(
                     help_text=
                     'The weighted average uncertainty in the north-south (Dec) direction (Deg).'
                 )),
                ('avg_flux_int',
                 models.FloatField(
                     help_text='The average integrated flux value.')),
                ('avg_flux_peak',
                 models.FloatField(help_text='The average peak flux value.')),
                ('max_flux_peak',
                 models.FloatField(help_text='The maximum peak flux value.')),
                ('min_flux_peak',
                 models.FloatField(help_text='The minimum peak flux value.')),
                ('max_flux_int',
                 models.FloatField(
                     help_text='The maximum integrated flux value.')),
                ('min_flux_int',
                 models.FloatField(
                     help_text='The minimum integrated flux value.')),
                ('min_flux_int_isl_ratio',
                 models.FloatField(
                     help_text='The minimum integrated island flux ratio value.'
                 )),
                ('min_flux_peak_isl_ratio',
                 models.FloatField(
                     help_text='The minimum peak island flux ratio value.')),
                ('avg_compactness',
                 models.FloatField(help_text='The average compactness.')),
                ('min_snr',
                 models.FloatField(
                     help_text=
                     'The minimum signal-to-noise ratio value of the detections.'
                 )),
                ('max_snr',
                 models.FloatField(
                     help_text=
                     'The maximum signal-to-noise ratio value of the detections.'
                 )),
                ('v_int',
                 models.FloatField(help_text='V metric for int flux.')),
                ('v_peak',
                 models.FloatField(help_text='V metric for peak flux.')),
                ('eta_int',
                 models.FloatField(help_text='Eta metric for int flux.')),
                ('eta_peak',
                 models.FloatField(help_text='Eta metric for peak flux.')),
                ('new_high_sigma',
                 models.FloatField(
                     help_text=
                     'The largest sigma value for the new source if it had been placed in a previous image.'
                 )),
                ('n_neighbour_dist',
                 models.FloatField(
                     help_text='Distance to the nearest neighbour (deg)')),
                ('vs_abs_significant_max_int',
                 models.FloatField(
                     default=0.0,
                     help_text=
                     'Maximum absolute value of all measurement pair variability t-statistics for int flux that exceed SOURCE_AGGREGATE_PAIR_METRICS_MIN_ABS_VS in the pipeline run configuration.'
                 )),
                ('m_abs_significant_max_int',
                 models.FloatField(
                     default=0.0,
                     help_text=
                     'Maximum absolute value of all measurement pair modulation indices for int flux that exceed SOURCE_AGGREGATE_PAIR_METRICS_MIN_ABS_VS in the pipeline run configuration.'
                 )),
                ('vs_abs_significant_max_peak',
                 models.FloatField(
                     default=0.0,
                     help_text=
                     'Maximum absolute value of all measurement pair variability t-statistics for peak flux that exceed SOURCE_AGGREGATE_PAIR_METRICS_MIN_ABS_VS in the pipeline run configuration.'
                 )),
                ('m_abs_significant_max_peak',
                 models.FloatField(
                     default=0.0,
                     help_text=
                     'Maximum absolute value of all measurement pair modulation indices for peak flux that exceed SOURCE_AGGREGATE_PAIR_METRICS_MIN_ABS_VS in the pipeline run configuration.'
                 )),
                ('n_meas',
                 models.IntegerField(
                     help_text='total measurements of the source')),
                ('n_meas_sel',
                 models.IntegerField(
                     help_text=
                     'total selavy extracted measurements of the source')),
                ('n_meas_forced',
                 models.IntegerField(
                     help_text=
                     'total force extracted measurements of the source')),
                ('n_rel',
                 models.IntegerField(
                     help_text=
                     'total relations of the source with other sources')),
                ('n_sibl',
                 models.IntegerField(
                     help_text='total siblings of the source')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Survey',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name',
                 models.CharField(help_text='Name of the Survey e.g. NVSS.',
                                  max_length=32,
                                  unique=True)),
                ('comment',
                 models.TextField(blank=True, default='', max_length=1000)),
                ('frequency',
                 models.IntegerField(help_text='Frequency of the survey.')),
            ],
            options={
                'ordering': ['name'],
            },
        ),
        migrations.CreateModel(
            name='Tagulous_Source_tags',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
                ('slug', models.SlugField()),
                ('count',
                 models.IntegerField(
                     default=0,
                     help_text=
                     'Internal counter of how many times this tag is in use')),
                ('protected',
                 models.BooleanField(
                     default=False,
                     help_text='Will not be deleted when the count reaches 0')
                 ),
            ],
            options={
                'ordering': ('name', ),
                'abstract': False,
                'unique_together': {('slug', )},
            },
            bases=(tagulous.models.models.BaseTagModel, models.Model),
        ),
        migrations.CreateModel(
            name='SurveySource',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name',
                 models.CharField(help_text='Name of the survey source.',
                                  max_length=100)),
                ('ra',
                 models.FloatField(
                     help_text='RA of the survey source (Deg).')),
                ('ra_err',
                 models.FloatField(
                     help_text='RA error of the survey source (Deg).')),
                ('dec',
                 models.FloatField(
                     help_text='DEC of the survey source (Deg).')),
                ('dec_err',
                 models.FloatField(
                     help_text='DEC error of the survey source (Deg).')),
                ('bmaj',
                 models.FloatField(
                     help_text=
                     'The major axis of the Gaussian fit to the survey source (arcsecs).'
                 )),
                ('bmin',
                 models.FloatField(
                     help_text=
                     'The minor axis of the Gaussian fit to the survey source (arcsecs).'
                 )),
                ('pa',
                 models.FloatField(
                     help_text=
                     'Position angle of Gaussian fit east of north to bmaj (Deg).'
                 )),
                ('flux_peak',
                 models.FloatField(
                     help_text='Peak flux of the Gaussian fit (Jy).')),
                ('flux_peak_err',
                 models.FloatField(
                     help_text='Peak flux error of the Gaussian fit (Jy).')),
                ('flux_int',
                 models.FloatField(
                     help_text='Integrated flux of the Gaussian fit (Jy).')),
                ('flux_int_err',
                 models.FloatField(
                     help_text='Integrated flux error of the Gaussian fit (Jy).')),
                ('alpha',
                 models.FloatField(
                     default=0,
                     help_text='Spectral index of the survey source.')),
                ('image_name',
                 models.CharField(
                     blank=True,
                     help_text=
                     'Name of survey image where measurement was made.',
                     max_length=100)),
                ('survey',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to='vast_pipeline.Survey')),
            ],
        ),
        migrations.CreateModel(
            name='SourceFav',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('comment',
                 models.TextField(
                     blank=True,
                     default='',
                     help_text='Why did you include this as a favourite',
                     max_length=500)),
                ('source',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to='vast_pipeline.Source')),
                ('user',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AddField(
            model_name='source',
            name='cross_match_sources',
            field=models.ManyToManyField(through='vast_pipeline.CrossMatch',
                                         to='vast_pipeline.SurveySource'),
        ),
        migrations.AddField(
            model_name='source',
            name='related',
            field=models.ManyToManyField(through='vast_pipeline.RelatedSource',
                                         to='vast_pipeline.Source'),
        ),
        migrations.AddField(
            model_name='source',
            name='run',
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to='vast_pipeline.Run'),
        ),
        migrations.AddField(
            model_name='source',
            name='tags',
            field=tagulous.models.fields.TagField(
                _set_tag_meta=True,
                autocomplete_settings={'width': '100%'},
                autocomplete_view='vast_pipeline:source_tags_autocomplete',
                help_text='Enter a comma-separated tag string',
                space_delimiter=False,
                to='vast_pipeline.Tagulous_Source_tags'),
        ),
        migrations.CreateModel(
            name='SkyRegion',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('centre_ra', models.FloatField()),
                ('centre_dec', models.FloatField()),
                ('width_ra', models.FloatField()),
                ('width_dec', models.FloatField()),
                ('xtr_radius', models.FloatField()),
                ('x', models.FloatField()),
                ('y', models.FloatField()),
                ('z', models.FloatField()),
                ('run', models.ManyToManyField(to='vast_pipeline.Run')),
            ],
        ),
        migrations.AddField(
            model_name='relatedsource',
            name='from_source',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                to='vast_pipeline.Source'),
        ),
        migrations.AddField(
            model_name='relatedsource',
            name='to_source',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name='related_sources',
                to='vast_pipeline.Source'),
        ),
        migrations.CreateModel(
            name='MeasurementPair',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('vs_peak',
                 models.FloatField(
                     help_text=
                     'Variability metric: t-statistic for peak fluxes.')),
                ('m_peak',
                 models.FloatField(
                     help_text=
                     'Variability metric: modulation index for peak fluxes.')),
                ('vs_int',
                 models.FloatField(
                     help_text=
                     'Variability metric: t-statistic for integrated fluxes.')
                 ),
                ('m_int',
                 models.FloatField(
                     help_text=
                     'Variability metric: modulation index for integrated fluxes.'
                 )),
                ('measurement_a',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   related_name='measurement_pairs_a',
                                   to='vast_pipeline.Measurement')),
                ('measurement_b',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   related_name='measurement_pairs_b',
                                   to='vast_pipeline.Measurement')),
                ('source',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to='vast_pipeline.Source')),
            ],
        ),
        migrations.AddField(
            model_name='measurement',
            name='source',
            field=models.ManyToManyField(through='vast_pipeline.Association',
                                         to='vast_pipeline.Source'),
        ),
        migrations.AddField(
            model_name='image',
            name='run',
            field=models.ManyToManyField(to='vast_pipeline.Run'),
        ),
        migrations.AddField(
            model_name='image',
            name='skyreg',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                to='vast_pipeline.SkyRegion'),
        ),
        migrations.AddField(
            model_name='crossmatch',
            name='source',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                to='vast_pipeline.Source'),
        ),
        migrations.AddField(
            model_name='crossmatch',
            name='survey_source',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                to='vast_pipeline.SurveySource'),
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('datetime', models.DateTimeField(auto_now_add=True)),
                ('comment', models.TextField()),
                ('object_id', models.PositiveIntegerField()),
                ('author',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to=settings.AUTH_USER_MODEL)),
                ('content_type',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                   to='contenttypes.ContentType')),
            ],
        ),
        migrations.AddField(
            model_name='association',
            name='meas',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                to='vast_pipeline.Measurement'),
        ),
        migrations.AddField(
            model_name='association',
            name='source',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                to='vast_pipeline.Source'),
        ),
        migrations.AddConstraint(
            model_name='relatedsource',
            constraint=models.UniqueConstraint(
                fields=('from_source', 'to_source'),
                name='vast_pipeline_relatedsource_unique_pair'),
        ),
        migrations.AddConstraint(
            model_name='measurementpair',
            constraint=models.UniqueConstraint(
                fields=('source', 'measurement_a', 'measurement_b'),
                name='vast_pipeline_measurementpair_unique_pair'),
        ),
    ]
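
A short sketch of how the schema created by this migration might be queried
(illustrative only; it assumes the vast_pipeline app exposes models matching the
operations above):

    from vast_pipeline.models import Run, Source

    def brightest_sources(run_name, limit=10):
        # Source rows carry a nullable ForeignKey to Run, so filtering by run
        # and ordering on the cached max_flux_peak column is a cheap query.
        run = Run.objects.get(name=run_name)
        return Source.objects.filter(run=run).order_by('-max_flux_peak')[:limit]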
Exemplo n.º 21
class Data(models.Model):
    PENDING, READY, ERROR = 1, 2, 3
    STATE_CHOICES = [(PENDING, "Pending"), (READY, "Ready"), (ERROR, "Error")]
    state = models.IntegerField(default=PENDING, choices=STATE_CHOICES)

    LINK, UPLOAD, TEXTAREA = 1, 2, 3
    METHOD_CHOICE = [(LINK, "Linked Data"), (UPLOAD, "Uploaded Data"),
                     (TEXTAREA, "Text Field")]
    method = models.IntegerField(default=LINK, choices=METHOD_CHOICE)

    name = models.CharField(max_length=MAX_NAME_LEN, default="My Data")
    image = models.ImageField(default=None,
                              blank=True,
                              upload_to=image_path,
                              max_length=MAX_FIELD_LEN)

    deleted = models.BooleanField(default=False)

    # Rank on a data list.
    rank = models.FloatField(default=100)

    # The user that edited the object most recently.
    lastedit_user = models.ForeignKey(User,
                                      related_name='data_editor',
                                      null=True,
                                      on_delete=models.CASCADE)
    lastedit_date = models.DateTimeField(default=timezone.now)

    owner = models.ForeignKey(User, null=True, on_delete=models.CASCADE)
    text = models.TextField(default='Data description.',
                            max_length=MAX_TEXT_LEN,
                            blank=True)
    html = models.TextField(default='html')
    date = models.DateTimeField(auto_now_add=True)

    type = models.CharField(max_length=MAX_NAME_LEN, default="DATA")
    project = models.ForeignKey(Project, on_delete=models.CASCADE)
    size = models.BigIntegerField(default=0)

    # FilePathField points to an existing file
    file = models.FilePathField(max_length=MAX_FIELD_LEN, path='')

    # Get the file count from the toc file.
    file_count = models.IntegerField(default=0)

    uid = models.CharField(max_length=32, unique=True)

    objects = Manager()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def save(self, *args, **kwargs):
        now = timezone.now()
        self.name = self.name[:MAX_NAME_LEN]
        self.uid = self.uid or util.get_uuid(8)
        self.date = self.date or now
        self.html = make_html(self.text, user=self.lastedit_user)
        self.owner = self.owner or self.project.owner
        self.type = self.type.replace(" ", '')
        self.lastedit_user = self.lastedit_user or self.owner or self.project.owner
        self.lastedit_date = self.lastedit_date or now
        # Build the data directory.
        data_dir = self.get_data_dir()
        if not os.path.isdir(data_dir):
            os.makedirs(data_dir)

        # Set the table of contents for the file.
        self.file = self.get_path()

        # Make this file if it does not exist
        if not os.path.isfile(self.file):
            with open(self.file, 'wt') as fp:
                pass

        super(Data, self).save(*args, **kwargs)

        # Set the counts
        self.project.set_counts(save=True)

    def peek(self):
        """
        Returns a preview of the data
        """
        try:
            target = self.get_path()
            lines = open(target, 'rt').readlines()
            if len(lines) == 1:
                target = lines[0]
                return util.smart_preview(target)
            else:
                data_dir = self.get_data_dir()
                rels = [os.path.relpath(path, data_dir) for path in lines]
                return "".join(rels)
        except Exception as exc:
            return f"Error :{exc}"

    def __str__(self):
        return self.name

    def get_data_dir(self):
        "The data directory"
        assert self.uid, "Sanity check. UID should always be set."
        return join(self.get_project_dir(), f"{self.uid}")

    def get_project_dir(self):
        return self.project.get_project_dir()

    def get_path(self):
        path = join(settings.TOC_ROOT, f"toc-{self.uid}.txt")
        return path

    def make_toc(self):

        tocname = self.get_path()

        collect = util.findfiles(self.get_data_dir(), collect=[])

        # Create a sorted file path collection.
        collect.sort()
        # Write the table of contents.
        with open(tocname, 'w') as fp:
            fp.write("\n".join(collect))

        # Find the cumulative size of the files.
        size = 0
        for elem in collect:
            if os.path.isfile(elem):
                size += os.stat(elem, follow_symlinks=True).st_size

        self.size = size
        self.file = tocname
        self.file_count = len(collect)

        return tocname

    def can_unpack(self):
        cond = str(self.get_path()).endswith("tar.gz")
        return cond

    def get_files(self):
        fnames = [line.strip() for line in open(self.get_path(), 'rt')]
        return fnames if len(fnames) else [""]

    def get_url(self, path=""):
        "Returns url to the data directory"
        return f"projects/{self.project.uid}/{self.uid}/" + path

    def url(self):
        return reverse('data_view', kwargs=dict(uid=self.uid))

    def fill_dict(self, obj):
        """
        Mutates a dictionary object to fill in more fields based
        on the current object.
        """
        fnames = self.get_files()
        if fnames:
            obj['value'] = fnames[0]
        else:
            obj['value'] = 'MISSING'

        obj['files'] = fnames
        obj['toc'] = self.get_path()
        obj['file_list'] = self.get_path()
        obj['id'] = self.id
        obj['name'] = self.name
        obj['uid'] = self.uid
        obj['data_dir'] = self.get_data_dir()
        obj['project_dir'] = self.get_project_dir()
        obj['data_url'] = self.url()

    @property
    def summary(self):
        """
        Returns first line of text
        """
        lines = self.text.splitlines() or ['']
        first = lines[0]
        return first

    def get_name(self):
        if self.deleted:
            return f'Deleted: {self.name}'

        return self.name
Example no. 22
0
class TranslationProject(models.Model, TreeItem):
    description = MarkupField(
        blank=True,
        help_text=_(
            'A description of this translation project. This is '
            'useful to give more information or instructions. Allowed '
            'markup: %s', get_markup_filter_name()),
    )
    language = models.ForeignKey(Language, db_index=True)
    project = models.ForeignKey(Project, db_index=True)
    real_path = models.FilePathField(editable=False)
    directory = models.OneToOneField(Directory, db_index=True, editable=False)
    pootle_path = models.CharField(max_length=255,
                                   null=False,
                                   unique=True,
                                   db_index=True,
                                   editable=False)
    disabled = models.BooleanField(default=False)

    tags = TaggableManager(
        blank=True,
        verbose_name=_("Tags"),
        help_text=_("A comma-separated list of tags."),
    )
    goals = TaggableManager(
        blank=True,
        verbose_name=_("Goals"),
        through=ItemWithGoal,
        help_text=_("A comma-separated list of goals."),
    )

    # Cached Unit values
    total_wordcount = models.PositiveIntegerField(default=0,
                                                  null=True,
                                                  editable=False)
    translated_wordcount = models.PositiveIntegerField(default=0,
                                                       null=True,
                                                       editable=False)
    fuzzy_wordcount = models.PositiveIntegerField(default=0,
                                                  null=True,
                                                  editable=False)
    suggestion_count = models.PositiveIntegerField(default=0,
                                                   null=True,
                                                   editable=False)

    _non_db_state_cache = LRUCachingDict(settings.PARSE_POOL_SIZE,
                                         settings.PARSE_POOL_CULL_FREQUENCY)

    index_directory = ".translation_index"

    objects = TranslationProjectManager()

    class Meta:
        unique_together = ('language', 'project')
        db_table = 'pootle_app_translationproject'

    ############################ Properties ###################################

    @property
    def tag_like_objects(self):
        """Return the tag like objects applied to this translation project.

        Tag like objects can be either tags or goals.
        """
        return list(
            chain(self.tags.all().order_by("name"),
                  self.goals.all().order_by("name")))

    @property
    def name(self):
        # TODO: See if `self.fullname` can be removed
        return self.fullname

    @property
    def fullname(self):
        return "%s [%s]" % (self.project.fullname, self.language.name)

    @property
    def abs_real_path(self):
        return absolute_real_path(self.real_path)

    @abs_real_path.setter
    def abs_real_path(self, value):
        self.real_path = relative_real_path(value)

    @property
    def file_style(self):
        return self.project.get_treestyle()

    @property
    def checker(self):
        from translate.filters import checks
        checkerclasses = [
            checks.projectcheckers.get(self.project.checkstyle,
                                       checks.StandardChecker),
            checks.StandardUnitChecker
        ]

        return checks.TeeChecker(checkerclasses=checkerclasses,
                                 excludefilters=excluded_filters,
                                 errorhandler=self.filtererrorhandler,
                                 languagecode=self.language.code)

    @property
    def non_db_state(self):
        if not hasattr(self, "_non_db_state"):
            try:
                self._non_db_state = self._non_db_state_cache[self.id]
            except KeyError:
                self._non_db_state = TranslationProjectNonDBState(self)
                self._non_db_state_cache[self.id] = self._non_db_state

        return self._non_db_state

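non_db_state keeps runtime-only state in a class-level cache keyed by the database id, so repeated property accesses on instances with the same id share one object. A toy version of that pattern, with a size-bounded dict standing in for the translate toolkit's LRUCachingDict:

from collections import OrderedDict

class BoundedCache(OrderedDict):
    """Evicts the oldest insertion once maxsize is exceeded (simplified stand-in)."""
    def __init__(self, maxsize=128):
        super(BoundedCache, self).__init__()
        self.maxsize = maxsize

    def __setitem__(self, key, value):
        OrderedDict.__setitem__(self, key, value)
        if len(self) > self.maxsize:
            self.popitem(last=False)

_state_cache = BoundedCache(maxsize=64)

def get_state(obj_id, factory):
    """Return the cached state for obj_id, creating it once via factory()."""
    try:
        return _state_cache[obj_id]
    except KeyError:
        state = _state_cache[obj_id] = factory()
        return state
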
    @property
    def units(self):
        self.require_units()
        # FIXME: we rely on implicit ordering defined in the model. We might
        # want to consider pootle_path as well
        return Unit.objects.filter(store__translation_project=self,
                                   state__gt=OBSOLETE).select_related('store')

    @property
    def is_terminology_project(self):
        return self.pootle_path.endswith('/terminology/')

    @property
    def is_template_project(self):
        return self == self.project.get_template_translationproject()

    @property
    def indexer(self):
        if (self.non_db_state.indexer is None
                and self.non_db_state._indexing_enabled):
            try:
                indexer = self.make_indexer()

                if not self.non_db_state._index_initialized:
                    self.init_index(indexer)
                    self.non_db_state._index_initialized = True

                self.non_db_state.indexer = indexer
            except Exception as e:
                logging.warning(
                    u"Could not initialize indexer for %s in %s: "
                    u"%s", self.project.code, self.language.code, str(e))
                self.non_db_state._indexing_enabled = False

        return self.non_db_state.indexer

    @property
    def has_index(self):
        return (self.non_db_state._indexing_enabled
                and (self.non_db_state._index_initialized
                     or self.indexer is not None))

    ############################ Cached properties ############################

    @cached_property
    def code(self):
        return u'-'.join([self.language.code, self.project.code])

    ############################ Methods ######################################

    def __unicode__(self):
        return self.pootle_path

    def __init__(self, *args, **kwargs):
        super(TranslationProject, self).__init__(*args, **kwargs)

    def save(self, *args, **kwargs):
        created = self.id is None
        project_dir = self.project.get_real_path()

        if not self.disabled:
            from pootle_app.project_tree import get_translation_project_dir
            self.abs_real_path = get_translation_project_dir(self.language,
                                                             project_dir,
                                                             self.file_style,
                                                             make_dirs=True)
            self.directory = self.language.directory \
                                        .get_or_make_subdir(self.project.code)
            self.pootle_path = self.directory.pootle_path

        super(TranslationProject, self).save(*args, **kwargs)

        if created:
            self.scan_files()

    def delete(self, *args, **kwargs):
        directory = self.directory
        super(TranslationProject, self).delete(*args, **kwargs)
        #TODO: avoid an access to directory while flushing the cache
        directory.flush_cache()
        directory.delete()

    def get_absolute_url(self):
        lang, proj, dir, fn = split_pootle_path(self.pootle_path)
        return reverse('pootle-tp-overview', args=[lang, proj, dir, fn])

    def get_translate_url(self, **kwargs):
        lang, proj, dir, fn = split_pootle_path(self.pootle_path)
        return u''.join([
            reverse('pootle-tp-translate', args=[lang, proj, dir, fn]),
            get_editor_filter(**kwargs),
        ])

    def filtererrorhandler(self, functionname, str1, str2, e):
        logging.error(u"Error in filter %s: %r, %r, %s", functionname, str1,
                      str2, e)
        return False

    def is_accessible_by(self, user):
        """Returns `True` if the current translation project is accessible
        by `user`.
        """
        if user.is_superuser:
            return True

        return self.project.code in Project.accessible_by_user(user)

    def update(self):
        """Update all stores to reflect state on disk."""
        stores = self.stores.exclude(file='').filter(state__gte=PARSED)
        for store in stores.iterator():
            store.update(update_translation=True, update_structure=True)

    def sync(self, conservative=True, skip_missing=False, modified_since=0):
        """Sync unsaved work on all stores to disk."""
        stores = self.stores.exclude(file='').filter(state__gte=PARSED)
        for store in stores.iterator():
            store.sync(update_translation=True,
                       update_structure=not conservative,
                       conservative=conservative,
                       create=False,
                       skip_missing=skip_missing,
                       modified_since=modified_since)

    def get_mtime(self):
        return self.directory.get_mtime()

    def require_units(self):
        """Makes sure all stores are parsed"""
        errors = 0
        for store in self.stores.filter(state__lt=PARSED).iterator():
            try:
                store.require_units()
            except IntegrityError:
                logging.info(u"Duplicate IDs in %s", store.abs_real_path)
                errors += 1
            except ParseError as e:
                logging.info(u"Failed to parse %s\n%s", store.abs_real_path, e)
                errors += 1
            except (IOError, OSError) as e:
                logging.info(u"Can't access %s\n%s", store.abs_real_path, e)
                errors += 1

        return errors

    ### TreeItem

    def get_children(self):
        return self.directory.get_children()

    def get_total_wordcount(self):
        return self.total_wordcount

    def get_translated_wordcount(self):
        return self.translated_wordcount

    def get_fuzzy_wordcount(self):
        return self.fuzzy_wordcount

    def get_suggestion_count(self):
        return self.suggestion_count

    def get_cachekey(self):
        return self.directory.pootle_path

    def get_parents(self):
        return [self.language, self.project]

    def _get_path_summary(self):
        from pootle_misc.stats import get_translate_actions
        return get_translate_actions(self.directory)

    ### /TreeItem

    def update_against_templates(self, pootle_path=None):
        """Update translation project from templates."""

        if self.is_template_project:
            return

        template_translation_project = self.project \
                                           .get_template_translationproject()

        if (template_translation_project is None
                or template_translation_project == self):
            return

        monolingual = self.project.is_monolingual

        if not monolingual:
            self.sync()

        from pootle_app.project_tree import (convert_template,
                                             get_translated_name,
                                             get_translated_name_gnu)

        for store in template_translation_project.stores.iterator():
            if self.file_style == 'gnu':
                new_pootle_path, new_path = get_translated_name_gnu(
                    self, store)
            else:
                new_pootle_path, new_path = get_translated_name(self, store)

            if pootle_path is not None and new_pootle_path != pootle_path:
                continue

            try:
                from pootle.scripts import hooks
                relative_po_path = os.path.relpath(new_path,
                                                   settings.PODIRECTORY)
                if not hooks.hook(self.project.code, "pretemplateupdate",
                                  relative_po_path):
                    continue
            except Exception:
                # Assume hook is not present.
                pass

            convert_template(self, store, new_pootle_path, new_path,
                             monolingual)

        all_files, new_files = self.scan_files(vcs_sync=False)

        from pootle_misc import versioncontrol
        project_path = self.project.get_real_path()

        if new_files and versioncontrol.hasversioning(project_path):
            from pootle.scripts import hooks

            message = ("New files added from %s based on templates" %
                       get_site_title())

            filestocommit = []
            for new_file in new_files:
                try:
                    hook_files = hooks.hook(self.project.code,
                                            "precommit",
                                            new_file.file.name,
                                            author=None,
                                            message=message)
                    filestocommit.extend(hook_files)
                except ImportError:
                    # Failed to import the hook - we're going to assume there
                    # just isn't a hook to import. That means we'll commit the
                    # original file.
                    filestocommit.append(new_file.file.name)

            success = True
            try:
                output = versioncontrol.add_files(project_path, filestocommit,
                                                  message)
            except Exception:
                logging.exception(u"Failed to add files")
                success = False

            for new_file in new_files:
                try:
                    hooks.hook(self.project.code,
                               "postcommit",
                               new_file.file.name,
                               success=success)
                except Exception:
                    #FIXME: We should not hide the exception - makes
                    # development impossible
                    pass

        if pootle_path is None:
            from pootle_app.models.signals import post_template_update
            post_template_update.send(sender=self)

    def scan_files(self, vcs_sync=True):
        """Scan the file system and return a list of translation files.

        :param vcs_sync: boolean on whether or not to synchronise the PO
                         directory with the VCS checkout.
        """
        proj_ignore = [p.strip() for p in self.project.ignoredfiles.split(',')]
        ignored_files = set(proj_ignore)
        ext = os.extsep + self.project.localfiletype

        # Scan for pots if template project
        if self.is_template_project:
            ext = os.extsep + self.project.get_template_filetype()

        from pootle_app.project_tree import (add_files,
                                             match_template_filename,
                                             direct_language_match_filename,
                                             sync_from_vcs)

        all_files = []
        new_files = []

        if self.file_style == 'gnu':
            if self.pootle_path.startswith('/templates/'):
                file_filter = lambda filename: match_template_filename(
                    self.project,
                    filename,
                )
            else:
                file_filter = lambda filename: direct_language_match_filename(
                    self.language.code,
                    filename,
                )
        else:
            file_filter = lambda filename: True

        if vcs_sync:
            sync_from_vcs(ignored_files, ext, self.real_path, file_filter)

        all_files, new_files = add_files(
            self,
            ignored_files,
            ext,
            self.real_path,
            self.directory,
            file_filter,
        )

        return all_files, new_files

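scan_files() only assembles the ignore list, the extension, and the per-filename predicate; the actual walking happens in pootle_app.project_tree.add_files. A hedged sketch of that kind of filtered scan on its own (not the real add_files):

import os

def find_translation_files(root, ext, ignored, file_filter=lambda name: True):
    """Yield file paths under root that end in ext, skipping ignored names."""
    for dirpath, _dirnames, filenames in os.walk(root):
        for name in filenames:
            if name in ignored:
                continue
            if name.endswith(ext) and file_filter(name):
                yield os.path.join(dirpath, name)

# e.g. list(find_translation_files('/po/myproject', '.po', {'ignore.po'},
#                                  lambda name: name.startswith('pt_BR')))
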
    def update_file_from_version_control(self, store):
        from pootle.scripts import hooks
        store.sync(update_translation=True)

        filetoupdate = store.file.name
        try:
            filetoupdate = hooks.hook(self.project.code, "preupdate",
                                      store.file.name)
        except Exception:
            pass

        # Keep a copy of working files in memory before updating
        working_copy = store.file.store

        try:
            logging.debug(u"Updating %s from version control", store.file.name)
            from pootle_misc import versioncontrol
            versioncontrol.update_file(filetoupdate)
            store.file._delete_store_cache()
            store.file._update_store_cache()
        except Exception:
            # Something wrong, file potentially modified, bail out
            # and replace with working copy
            logging.exception(
                u"Near fatal catastrophe, while updating %s "
                u"from version control", store.file.name)
            working_copy.save()

            raise VersionControlError

        try:
            hooks.hook(self.project.code, "postupdate", store.file.name)
        except Exception:
            pass

        try:
            logging.debug(u"Parsing version control copy of %s into db",
                          store.file.name)
            store.update(update_structure=True, update_translation=True)

            #FIXME: try to avoid merging if file was not updated
            logging.debug(u"Merging %s with version control update",
                          store.file.name)
            store.mergefile(working_copy,
                            None,
                            allownewstrings=False,
                            suggestions=True,
                            notranslate=False,
                            obsoletemissing=False)
        except Exception:
            logging.exception(
                u"Near fatal catastrophe, while merging %s with "
                u"version control copy", store.file.name)
            working_copy.save()
            store.update(update_structure=True, update_translation=True)
            raise

    def update_dir(self, request=None, directory=None):
        """Updates translation project's files from version control, retaining
        uncommitted translations.
        """
        remote_stats = {}

        from pootle_misc import versioncontrol
        try:
            versioncontrol.update_dir(self.real_path)
        except IOError as e:
            logging.exception(u"Error during update of %s", self.real_path)
            if request:
                msg = _("Failed to update from version control: %(error)s",
                        {"error": e})
                messages.error(request, msg)
            return

        all_files, new_files = self.scan_files()
        new_file_set = set(new_files)

        from pootle.scripts import hooks

        # Go through all stores except any pootle-terminology.* ones
        if directory.is_translationproject():
            stores = self.stores.exclude(file="")
        else:
            stores = directory.stores.exclude(file="")

        for store in stores.iterator():
            if store in new_file_set:
                continue

            store.sync(update_translation=True)
            filetoupdate = store.file.name
            try:
                filetoupdate = hooks.hook(self.project.code, "preupdate",
                                          store.file.name)
            except Exception:
                pass

            # keep a copy of working files in memory before updating
            working_copy = store.file.store

            versioncontrol.copy_to_podir(filetoupdate)
            store.file._delete_store_cache()
            store.file._update_store_cache()

            try:
                hooks.hook(self.project.code, "postupdate", store.file.name)
            except Exception:
                pass

            try:
                logging.debug(u"Parsing version control copy of %s into db",
                              store.file.name)
                store.update(update_structure=True, update_translation=True)

                #FIXME: Try to avoid merging if file was not updated
                logging.debug(u"Merging %s with version control update",
                              store.file.name)
                store.mergefile(working_copy,
                                None,
                                allownewstrings=False,
                                suggestions=True,
                                notranslate=False,
                                obsoletemissing=False)
            except Exception:
                logging.exception(
                    u"Near fatal catastrophe, while merging %s "
                    "with version control copy", store.file.name)
                working_copy.save()
                store.update(update_structure=True, update_translation=True)
                raise

        if request:
            msg = _(
                u'Updated project <em>%(project)s</em> from version control',
                {'project': self.fullname})
            messages.info(request, msg)

        from pootle_app.models.signals import post_vc_update
        post_vc_update.send(sender=self)

    def update_file(self, request, store):
        """Updates file from version control, retaining uncommitted
        translations"""
        try:
            self.update_file_from_version_control(store)

            # FIXME: This belongs to views
            msg = _(u'Updated file <em>%(filename)s</em> from version control',
                    {'filename': store.file.name})
            messages.info(request, msg)

            from pootle_app.models.signals import post_vc_update
            post_vc_update.send(sender=self)
        except VersionControlError as e:
            # FIXME: This belongs to views
            msg = _(
                u"Failed to update <em>%(filename)s</em> from "
                u"version control: %(error)s", {
                    'filename': store.file.name,
                    'error': e,
                })
            messages.error(request, msg)

        self.scan_files()

    def commit_dir(self, user, directory, request=None):
        """Commits files under a directory to version control.

        This does not do permission checking.
        """
        self.sync()
        total = directory.get_total_wordcount()
        translated = directory.get_translated_wordcount()
        fuzzy = directory.get_fuzzy_wordcount()
        author = user.username

        message = stats_message_raw(
            "Commit from %s by user %s." % (get_site_title(), author), total,
            translated, fuzzy)

        # Try to append email as well, since some VCS does not allow omitting
        # it (ie. Git).
        if user.is_authenticated() and len(user.email):
            author += " <%s>" % user.email

        if directory.is_translationproject():
            stores = list(self.stores.exclude(file=""))
        else:
            stores = list(directory.stores.exclude(file=""))

        filestocommit = []

        from pootle.scripts import hooks
        for store in stores:
            try:
                filestocommit.extend(
                    hooks.hook(self.project.code,
                               "precommit",
                               store.file.name,
                               author=author,
                               message=message))
            except ImportError:
                # Failed to import the hook - we're going to assume there just
                # isn't a hook to import. That means we'll commit the original
                # file.
                filestocommit.append(store.file.name)

        success = True
        try:
            from pootle_misc import versioncontrol
            project_path = self.project.get_real_path()
            versioncontrol.add_files(project_path, filestocommit, message,
                                     author)
            # FIXME: This belongs to views
            if request is not None:
                msg = _(
                    "Committed all files under <em>%(path)s</em> to "
                    "version control", {'path': directory.pootle_path})
                messages.success(request, msg)
        except Exception as e:
            logging.exception(u"Failed to commit directory")

            # FIXME: This belongs to views
            if request is not None:
                msg = _("Failed to commit to version control: %(error)s",
                        {'error': e})
                messages.error(request, msg)

            success = False

        for store in stores:
            try:
                hooks.hook(self.project.code,
                           "postcommit",
                           store.file.name,
                           success=success)
            except Exception:
                #FIXME: We should not hide the exception - makes development
                # impossible
                pass

        from pootle_app.models.signals import post_vc_commit
        post_vc_commit.send(sender=self,
                            path_obj=directory,
                            user=user,
                            success=success)

        return success

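Both commit paths follow the same precommit-hook convention: ask the hook which files to commit and fall back to the original file when no hook module exists. A standalone sketch of that convention, where run_hook is a hypothetical callable standing in for pootle.scripts.hooks.hook:

def collect_files_to_commit(run_hook, project_code, filenames, author, message):
    """Return the list of files to commit, letting a precommit hook rewrite it."""
    filestocommit = []
    for name in filenames:
        try:
            filestocommit.extend(
                run_hook(project_code, "precommit", name,
                         author=author, message=message))
        except ImportError:
            # No hook module for this project: commit the original file.
            filestocommit.append(name)
    return filestocommit
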
    def commit_file(self, user, store, request=None):
        """Commits an individual file to version control.

        This does not do permission checking.
        """
        store.sync(update_structure=False,
                   update_translation=True,
                   conservative=True)
        total = store.get_total_wordcount()
        translated = store.get_translated_wordcount()
        fuzzy = store.get_fuzzy_wordcount()
        author = user.username

        message = stats_message_raw(
            "Commit from %s by user %s." % (get_site_title(), author), total,
            translated, fuzzy)

        # Try to append email as well, since some VCS does not allow omitting
        # it (ie. Git).
        if user.is_authenticated() and len(user.email):
            author += " <%s>" % user.email

        from pootle.scripts import hooks
        try:
            filestocommit = hooks.hook(self.project.code,
                                       "precommit",
                                       store.file.name,
                                       author=author,
                                       message=message)
        except ImportError:
            # Failed to import the hook - we're going to assume there just
            # isn't a hook to import. That means we'll commit the original
            # file.
            filestocommit = [store.file.name]

        success = True
        try:
            from pootle_misc import versioncontrol
            for file in filestocommit:
                versioncontrol.commit_file(file,
                                           message=message,
                                           author=author)

                # FIXME: This belongs to views
                if request is not None:
                    msg = _(
                        "Committed file <em>%(filename)s</em> to version "
                        "control", {'filename': file})
                    messages.success(request, msg)
        except Exception as e:
            logging.exception(u"Failed to commit file")

            # FIXME: This belongs to views
            if request is not None:
                msg_params = {
                    'filename': store.file.name,
                    'error': e,
                }
                msg = _(
                    "Failed to commit <em>%(filename)s</em> to version "
                    "control: %(error)s", msg_params)
                messages.error(request, msg)

            success = False

        try:
            hooks.hook(self.project.code,
                       "postcommit",
                       store.file.name,
                       success=success)
        except Exception:
            #FIXME: We should not hide the exception - makes development
            # impossible
            pass

        from pootle_app.models.signals import post_vc_commit
        post_vc_commit.send(sender=self,
                            path_obj=store,
                            user=user,
                            success=success)

        return success

    def initialize(self):
        try:
            from pootle.scripts import hooks
            hooks.hook(self.project.code, "initialize", self.real_path,
                       self.language.code)
        except Exception:
            logging.exception(u"Failed to initialize (%s)", self.language.code)

    ###########################################################################

    def get_archive(self, stores, path=None):
        """Returns an archive of the given files."""
        import shutil
        import subprocess
        from pootle_misc import ptempfile as tempfile

        tempzipfile = None
        archivecontents = None

        try:
            # Using zip command line is fast
            # The temporary file below is opened and immediately closed for
            # security reasons
            fd, tempzipfile = tempfile.mkstemp(prefix='pootle', suffix='.zip')
            os.close(fd)
            archivecontents = open(tempzipfile, "wb")

            file_list = [
                store.abs_real_path[len(self.abs_real_path)+1:]
                for store in stores.iterator()
            ]
            process = subprocess.Popen(['zip', '-r', '-'] + file_list,
                                       cwd=self.abs_real_path,
                                       stdout=archivecontents)
            result = process.wait()

            if result == 0:
                if path is not None:
                    shutil.move(tempzipfile, path)
                    return
                else:
                    with open(tempzipfile, "rb") as zfp:
                        filedata = zfp.read()
                    if filedata:
                        return filedata
                    else:
                        raise Exception("failed to read temporary zip file")
            else:
                raise Exception("zip command returned error code: %d" % result)
        except Exception as e:
            # But if it doesn't work, we can do it from Python.
            logging.debug(e)
            logging.debug("falling back to zipfile module")
            if path is not None:
                if tempzipfile is None:
                    fd, tempzipfile = tempfile.mkstemp(prefix='pootle',
                                                       suffix='.zip')
                    os.close(fd)
                    archivecontents = open(tempzipfile, "wb")
            else:
                import cStringIO
                archivecontents = cStringIO.StringIO()

            import zipfile
            archive = zipfile.ZipFile(archivecontents, 'w',
                                      zipfile.ZIP_DEFLATED)
            for store in stores.iterator():
                archive.write(
                    store.abs_real_path.encode('utf-8'),
                    store.abs_real_path[len(self.abs_real_path) +
                                        1:].encode('utf-8'))
            archive.close()

            if path is not None:
                shutil.move(tempzipfile, path)
            else:
                return archivecontents.getvalue()
        finally:
            if tempzipfile is not None and os.path.exists(tempzipfile):
                os.remove(tempzipfile)
            try:
                archivecontents.close()
            except Exception:
                pass

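The Python fallback in get_archive() amounts to writing each store into a zip under a path relative to the project root. A minimal in-memory version of that step (io.BytesIO here replaces the cStringIO/tempfile plumbing above):

import io
import os
import zipfile

def zip_files(base_dir, paths):
    """Return zip bytes containing paths, stored relative to base_dir."""
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED) as archive:
        for path in paths:
            archive.write(path, os.path.relpath(path, base_dir))
    return buf.getvalue()
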
    ###########################################################################

    def make_indexer(self):
        """Get an indexing object for this project.

        Since we do not want to keep the indexing databases open for the
        lifetime of the TranslationProject (it is cached!), it may NOT be
        part of the Project object, but should be used via a short living
        local variable.
        """
        logging.debug(u"Loading indexer for %s", self.pootle_path)
        indexdir = os.path.join(self.abs_real_path, self.index_directory)
        from translate.search import indexing
        indexer = indexing.get_indexer(indexdir)
        indexer.set_field_analyzers({
            "pofilename": indexer.ANALYZER_EXACT,
            "pomtime": indexer.ANALYZER_EXACT,
            "dbid": indexer.ANALYZER_EXACT,
        })

        return indexer

    def init_index(self, indexer):
        """Initializes the search index."""
        #FIXME: stop relying on pomtime so virtual files can be searchable?
        try:
            indexer.begin_transaction()
            for store in self.stores.iterator():
                try:
                    self.update_index(indexer, store)
                except OSError:
                    # Broken link or permission problem?
                    logging.exception("Error indexing %s", store)
            indexer.commit_transaction()
            indexer.flush(optimize=True)
        except Exception:
            logging.exception(u"Error opening indexer for %s", self)
            try:
                indexer.cancel_transaction()
            except Exception:
                pass

    def update_index(self, indexer, store, unitid=None):
        """Updates the index with the contents of store (limit to
        ``unitid`` if given).

        There are two reasons for calling this function:

            1. Creating a new instance of :cls:`TranslationProject`
               (see :meth:`TranslationProject.init_index`)
               -> Check if the index is up-to-date / rebuild the index if
               necessary
            2. Translating a unit via the web interface
               -> (re)index only the specified unit(s)

        The argument ``unitid`` should be None for 1.

        Known problems:

            1. This function should get called, when the po file changes
               externally.

               WARNING: You have to stop the pootle server before manually
               changing po files, if you want to keep the index database in
               sync.
        """
        #FIXME: leverage file updated signal to check if index needs updating
        if indexer is None:
            return False

        # Check if the pomtime in the index == the latest pomtime
        pomtime = str(hash(store.get_mtime())**2)
        pofilenamequery = indexer.make_query(
            [("pofilename", store.pootle_path)], True)
        pomtimequery = indexer.make_query([("pomtime", pomtime)], True)
        gooditemsquery = indexer.make_query([pofilenamequery, pomtimequery],
                                            True)
        gooditemsnum = indexer.get_query_result(gooditemsquery) \
                              .get_matches_count()

        # If there is at least one up-to-date indexing item, then the po file
        # was not changed externally -> no need to update the database
        units = None
        if (gooditemsnum > 0) and (not unitid):
            # Nothing to be done
            return
        elif unitid is not None:
            # Update only specific item - usually translation via the web
            # interface. All other items should still be up-to-date (even with
            # an older pomtime).
            # Delete the relevant item from the database
            units = store.units.filter(id=unitid)
            itemsquery = indexer.make_query([("dbid", str(unitid))], False)
            indexer.delete_doc([pofilenamequery, itemsquery])
        else:
            # (item is None)
            # The po file is not indexed - or it was changed externally
            # delete all items of this file
            logging.debug(u"Updating %s indexer for file %s", self.pootle_path,
                          store.pootle_path)
            indexer.delete_doc({"pofilename": store.pootle_path})
            units = store.units

        addlist = []
        for unit in units.iterator():
            doc = {
                "pofilename": store.pootle_path,
                "pomtime": pomtime,
                "dbid": str(unit.id),
            }

            if unit.hasplural():
                orig = "\n".join(unit.source.strings)
                trans = "\n".join(unit.target.strings)
            else:
                orig = unit.source
                trans = unit.target

            doc.update({
                "source": orig,
                "target": trans,
                "notes": unit.getnotes(),
                "locations": unit.getlocations(),
            })
            addlist.append(doc)

        if addlist:
            for add_item in addlist:
                indexer.index_document(add_item)

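The heart of update_index() is the pomtime fingerprint: every indexed unit stores a hash of the file's mtime, and if at least one document carries the current fingerprint the file is assumed unchanged. The decision itself can be sketched without an indexer, with known_fingerprints as a hypothetical mapping from pofilename to the last fingerprint written:

def needs_reindex(known_fingerprints, pofilename, mtime):
    """True when the stored fingerprint no longer matches the file's mtime."""
    fingerprint = str(hash(mtime) ** 2)   # same squared-hash trick as above
    return known_fingerprints.get(pofilename) != fingerprint

# e.g. needs_reindex({'/pt/proj/file.po': '1234'}, '/pt/proj/file.po', 1699999999.0)
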
    ###########################################################################

    def gettermmatcher(self):
        """Returns the terminology matcher."""
        terminology_stores = Store.objects.none()
        mtime = None

        if self.is_terminology_project:
            terminology_stores = self.stores.all()
            mtime = self.get_mtime()
        else:
            # Get global terminology first
            try:
                termproject = TranslationProject.objects.get(
                    language=self.language_id,
                    project__code='terminology',
                )
                mtime = termproject.get_mtime()
                terminology_stores = termproject.stores.all()
            except TranslationProject.DoesNotExist:
                pass

            local_terminology = self.stores.filter(
                name__startswith='pootle-terminology')
            for store in local_terminology.iterator():
                if mtime is None:
                    mtime = store.get_mtime()
                else:
                    mtime = max(mtime, store.get_mtime())

            terminology_stores = terminology_stores | local_terminology

        if mtime is None:
            return

        if mtime != self.non_db_state.termmatchermtime:
            from translate.search import match
            self.non_db_state.termmatcher = match.terminologymatcher(
                terminology_stores.iterator(), )
            self.non_db_state.termmatchermtime = mtime

        return self.non_db_state.termmatcher
Example no. 23
0
class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("django_dicom", "0001_initial"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        migrations.swappable_dependency(settings.SUBJECT_MODEL),
        migrations.swappable_dependency(settings.STUDY_GROUP_MODEL),
        ("django_analyses", "0001_initial"),
    ]

    operations = [
        migrations.CreateModel(
            name="NIfTI",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "created",
                    django_extensions.db.fields.CreationDateTimeField(
                        auto_now_add=True, verbose_name="created"),
                ),
                (
                    "modified",
                    django_extensions.db.fields.ModificationDateTimeField(
                        auto_now=True, verbose_name="modified"),
                ),
                ("path", models.FilePathField(max_length=1000, unique=True)),
                ("is_raw", models.BooleanField(default=False)),
            ],
            options={
                "verbose_name": "NIfTI",
            },
        ),
        migrations.CreateModel(
            name="NiftiInputDefinition",
            fields=[
                (
                    "inputdefinition_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="django_analyses.InputDefinition",
                    ),
                ),
            ],
            bases=("django_analyses.inputdefinition", ),
        ),
        migrations.CreateModel(
            name="NiftiOutputDefinition",
            fields=[
                (
                    "outputdefinition_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="django_analyses.OutputDefinition",
                    ),
                ),
            ],
            bases=("django_analyses.outputdefinition", ),
        ),
        migrations.CreateModel(
            name="Scan",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "created",
                    django_extensions.db.fields.CreationDateTimeField(
                        auto_now_add=True, verbose_name="created"),
                ),
                (
                    "modified",
                    django_extensions.db.fields.ModificationDateTimeField(
                        auto_now=True, verbose_name="modified"),
                ),
                (
                    "institution_name",
                    models.CharField(blank=True, max_length=64, null=True),
                ),
                (
                    "time",
                    models.DateTimeField(
                        blank=True,
                        help_text="The time in which the scan was acquired.",
                        null=True,
                    ),
                ),
                (
                    "description",
                    models.CharField(
                        blank=True,
                        help_text=
                        "A short description of the scan's acqusition parameters.",
                        max_length=100,
                        null=True,
                    ),
                ),
                (
                    "number",
                    models.IntegerField(
                        blank=True,
                        help_text=
                        "The number of this scan relative to the session in which it was acquired.",
                        null=True,
                        validators=[
                            django.core.validators.MinValueValidator(0)
                        ],
                    ),
                ),
                (
                    "echo_time",
                    models.FloatField(
                        blank=True,
                        help_text=
                        "The time between the application of the radiofrequency excitation pulse and the peak of the signal induced in the coil (in milliseconds).",
                        null=True,
                        validators=[
                            django.core.validators.MinValueValidator(0)
                        ],
                    ),
                ),
                (
                    "repetition_time",
                    models.FloatField(
                        blank=True,
                        help_text=
                        "The time between two successive RF pulses (in milliseconds).",
                        null=True,
                        validators=[
                            django.core.validators.MinValueValidator(0)
                        ],
                    ),
                ),
                (
                    "inversion_time",
                    models.FloatField(
                        blank=True,
                        help_text=
                        "The time between the 180-degree inversion pulse and the following spin-echo (SE) sequence (in milliseconds).",
                        null=True,
                        validators=[
                            django.core.validators.MinValueValidator(0)
                        ],
                    ),
                ),
                (
                    "spatial_resolution",
                    django.contrib.postgres.fields.ArrayField(
                        base_field=models.FloatField(),
                        blank=True,
                        null=True,
                        size=3),
                ),
                (
                    "comments",
                    models.TextField(
                        blank=True,
                        help_text=
                        "If anything noteworthy happened during acquisition, it may be noted here.",
                        max_length=1000,
                        null=True,
                    ),
                ),
                ("is_updated_from_dicom", models.BooleanField(default=False)),
                (
                    "_nifti",
                    models.OneToOneField(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        to="django_mri.NIfTI",
                    ),
                ),
                (
                    "added_by",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="mri_uploads",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "dicom",
                    models.OneToOneField(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="scan",
                        to="django_dicom.Series",
                        verbose_name="DICOM Series",
                    ),
                ),
                (
                    "study_groups",
                    models.ManyToManyField(
                        blank=True,
                        related_name="mri_scans",
                        to=settings.STUDY_GROUP_MODEL,
                    ),
                ),
                (
                    "subject",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="mri_scans",
                        to=settings.SUBJECT_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name_plural": "MRI Scans",
            },
        ),
        migrations.CreateModel(
            name="ScanInputDefinition",
            fields=[
                (
                    "inputdefinition_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="django_analyses.InputDefinition",
                    ),
                ),
            ],
            bases=("django_analyses.inputdefinition", ),
        ),
        migrations.CreateModel(
            name="SequenceType",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("title", models.CharField(max_length=255,
                                           verbose_name="title")),
                (
                    "description",
                    models.TextField(blank=True,
                                     null=True,
                                     verbose_name="description"),
                ),
                (
                    "scanning_sequence",
                    django_mri.models.fields.ChoiceArrayField(
                        base_field=models.CharField(
                            choices=[
                                ("SE", "Spin Echo"),
                                ("IR", "Inversion Recovery"),
                                ("GR", "Gradient Recalled"),
                                ("EP", "Echo Planar"),
                                ("RM", "Research Mode"),
                            ],
                            max_length=2,
                        ),
                        blank=True,
                        null=True,
                        size=5,
                    ),
                ),
                (
                    "sequence_variant",
                    django_mri.models.fields.ChoiceArrayField(
                        base_field=models.CharField(
                            choices=[
                                ("SK", "Segmented k-Space"),
                                ("MTC", "Magnetization Transfer Contrast"),
                                ("SS", "Steady State"),
                                ("TRSS", "Time Reversed Steady State"),
                                ("SP", "Spoiled"),
                                ("MP", "MAG Prepared"),
                                ("OSP", "Oversampling Phase"),
                                ("NONE", "None"),
                            ],
                            max_length=4,
                        ),
                        blank=True,
                        null=True,
                        size=None,
                    ),
                ),
            ],
            options={
                "ordering": ("title", ),
                "unique_together": {("scanning_sequence", "sequence_variant")},
            },
        ),
        migrations.CreateModel(
            name="ScanInput",
            fields=[
                (
                    "input_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="django_analyses.Input",
                    ),
                ),
                (
                    "definition",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="input_set",
                        to="django_mri.ScanInputDefinition",
                    ),
                ),
                (
                    "value",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="run_input_set",
                        to="django_mri.Scan",
                    ),
                ),
            ],
            bases=("django_analyses.input", ),
        ),
        migrations.CreateModel(
            name="NiftiOutput",
            fields=[
                (
                    "output_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="django_analyses.Output",
                    ),
                ),
                (
                    "definition",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="output_set",
                        to="django_mri.NiftiOutputDefinition",
                    ),
                ),
                (
                    "value",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="run_output_set",
                        to="django_mri.NIfTI",
                    ),
                ),
            ],
            bases=("django_analyses.output", ),
        ),
        migrations.CreateModel(
            name="NiftiInput",
            fields=[
                (
                    "input_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="django_analyses.Input",
                    ),
                ),
                (
                    "definition",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="input_set",
                        to="django_mri.NiftiInputDefinition",
                    ),
                ),
                (
                    "value",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="run_input_set",
                        to="django_mri.NIfTI",
                    ),
                ),
            ],
            bases=("django_analyses.input", ),
        ),
    ]
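The migration above points subject and study_groups at models named by settings (settings.SUBJECT_MODEL, settings.STUDY_GROUP_MODEL), mirroring how Django handles AUTH_USER_MODEL, and lists them as swappable dependencies. Assuming those settings hold "app_label.ModelName" strings, the model side of that arrangement would look roughly like this (a sketch, not the package's actual source):

from django.conf import settings
from django.db import models

class Scan(models.Model):
    # The concrete targets are resolved from settings, so a site can plug in
    # its own Subject / StudyGroup implementations.
    subject = models.ForeignKey(settings.SUBJECT_MODEL, blank=True, null=True,
                                on_delete=models.PROTECT,
                                related_name="mri_scans")
    study_groups = models.ManyToManyField(settings.STUDY_GROUP_MODEL,
                                          blank=True,
                                          related_name="mri_scans")
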
Example no. 24
0
class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('company', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Career',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('start_year', models.CharField(max_length=30, null=True)),
                ('start_month', models.CharField(max_length=30, null=True)),
                ('end_year', models.CharField(max_length=30, null=True)),
                ('end_month', models.CharField(max_length=30, null=True)),
                ('is_working', models.BooleanField(default=0)),
                ('company', models.CharField(max_length=100, null=True)),
                ('position', models.CharField(max_length=100, null=True)),
            ],
            options={
                'db_table': 'careers',
            },
        ),
        migrations.CreateModel(
            name='Job_text',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('is_working', models.CharField(max_length=50)),
                ('text', models.CharField(max_length=50)),
                ('agreement', models.BooleanField(default=0)),
            ],
            options={
                'db_table': 'job_texts',
            },
        ),
        migrations.CreateModel(
            name='Language',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
            ],
            options={
                'db_table': 'languages',
            },
        ),
        migrations.CreateModel(
            name='Level',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
            ],
            options={
                'db_table': 'levels',
            },
        ),
        migrations.CreateModel(
            name='Linguistic',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
            ],
            options={
                'db_table': 'linguistics',
            },
        ),
        migrations.CreateModel(
            name='Matchup_career',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('year', models.CharField(max_length=50)),
            ],
            options={
                'db_table': 'matchup_careers',
            },
        ),
        migrations.CreateModel(
            name='Resume',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=50, null=True)),
                ('contact', models.CharField(max_length=50, null=True)),
                ('email', models.EmailField(max_length=500, null=True)),
                ('description', models.TextField(blank=True)),
                ('created_at', models.DateField(auto_now_add=True)),
                ('status', models.BooleanField(default=False)),
                ('is_matchup', models.BooleanField(default=False)),
                ('image_url', models.URLField(max_length=2000, null=True)),
                ('title', models.CharField(max_length=150, null=True)),
                ('income', models.IntegerField(default=0)),
                ('total_work', models.IntegerField(default=0, null=True)),
                ('is_job_category', models.BooleanField(default=False)),
                ('job_category',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='company.Job_category')),
                ('matchup_career',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.Matchup_career')),
            ],
            options={
                'db_table': 'resumes',
            },
        ),
        migrations.CreateModel(
            name='User',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('email', models.EmailField(max_length=500)),
                ('password', models.CharField(max_length=500)),
                ('agreement', models.BooleanField(default=False)),
                ('contact', models.CharField(max_length=50, null=True)),
                ('image_url',
                 models.URLField(
                     default='https://s3.ap-northeast-2.amazonaws.com/wanted-public/profile_default.png',
                     max_length=2000)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('fail_count', models.IntegerField(default=0)),
                ('deleted', models.BooleanField(default=False)),
                ('job_position', models.CharField(max_length=100, null=True)),
                ('country',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='company.Country')),
                ('user_bookmark',
                 models.ManyToManyField(related_name='user_bookmark',
                                        through='company.Bookmark',
                                        to='company.Position')),
            ],
            options={
                'db_table': 'users',
            },
        ),
        migrations.CreateModel(
            name='Want',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('company',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='company.Company')),
                ('user',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.User')),
            ],
            options={
                'db_table': 'wants',
            },
        ),
        migrations.CreateModel(
            name='Test',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=300, null=True)),
                ('score', models.CharField(max_length=150, null=True)),
                ('date', models.DateField(null=True)),
                ('language',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.Language')),
            ],
            options={
                'db_table': 'tests',
            },
        ),
        migrations.CreateModel(
            name='Security',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('user_ip', models.CharField(max_length=100)),
                ('browser', models.CharField(max_length=1000)),
                ('date', models.DateField(auto_now=True)),
                ('user',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.User')),
            ],
            options={
                'db_table': 'securities',
            },
        ),
        migrations.CreateModel(
            name='Resume_role',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('resume',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.Resume')),
                ('role',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='company.Role')),
            ],
            options={
                'db_table': 'resume_roles',
            },
        ),
        migrations.CreateModel(
            name='Resume_file',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('resume_file', models.FilePathField(path='/user/resumes')),
                ('user',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.User')),
            ],
            options={
                'db_table': 'resume_files',
            },
        ),
        migrations.AddField(
            model_name='resume',
            name='resume_resume_role',
            field=models.ManyToManyField(related_name='resume_resume_role',
                                         through='user.Resume_role',
                                         to='company.Role'),
        ),
        migrations.AddField(
            model_name='resume',
            name='user',
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to='user.User'),
        ),
        migrations.CreateModel(
            name='Result',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('start_year', models.CharField(max_length=30, null=True)),
                ('start_month', models.CharField(max_length=30, null=True)),
                ('end_year', models.CharField(max_length=30, null=True)),
                ('end_month', models.CharField(max_length=30, null=True)),
                ('title', models.CharField(max_length=300, null=True)),
                ('content', models.CharField(max_length=300, null=True)),
                ('career',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.Career')),
            ],
            options={
                'db_table': 'results',
            },
        ),
        migrations.CreateModel(
            name='Matchup_skill',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('skill', models.CharField(max_length=50, null=True)),
                ('resume',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.Resume')),
            ],
            options={
                'db_table': 'matchup_skills',
            },
        ),
        migrations.CreateModel(
            name='Matchup_job',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('job_text',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.Job_text')),
                ('resume',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.Resume')),
            ],
            options={
                'db_table': 'matchup_jobs',
            },
        ),
        migrations.CreateModel(
            name='Link',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('url', models.URLField(max_length=2000, null=True)),
                ('resume',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.Resume')),
            ],
            options={
                'db_table': 'links',
            },
        ),
        migrations.AddField(
            model_name='language',
            name='level',
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to='user.Level'),
        ),
        migrations.AddField(
            model_name='language',
            name='linguistic',
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to='user.Linguistic'),
        ),
        migrations.AddField(
            model_name='language',
            name='resume',
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to='user.Resume'),
        ),
        migrations.CreateModel(
            name='Exception',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('company',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='company.Company')),
                ('resume',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.Resume')),
            ],
            options={
                'db_table': 'exceptions',
            },
        ),
        migrations.CreateModel(
            name='Education',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('start_year', models.CharField(max_length=30, null=True)),
                ('start_month', models.CharField(max_length=30, null=True)),
                ('end_year', models.CharField(max_length=30, null=True)),
                ('end_month', models.CharField(max_length=30, null=True)),
                ('school', models.CharField(max_length=100, null=True)),
                ('is_working', models.BooleanField(default=False)),
                ('specialism', models.CharField(max_length=100, null=True)),
                ('subject', models.CharField(max_length=200, null=True)),
                ('resume',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.Resume')),
            ],
            options={
                'db_table': 'educations',
            },
        ),
        migrations.AddField(
            model_name='career',
            name='resume',
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to='user.Resume'),
        ),
        migrations.CreateModel(
            name='Award',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('date_year', models.CharField(max_length=30, null=True)),
                ('date_month', models.CharField(max_length=30, null=True)),
                ('name', models.CharField(max_length=100, null=True)),
                ('content', models.CharField(max_length=200, null=True)),
                ('resume',
                 models.ForeignKey(
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='user.Resume')),
            ],
            options={
                'db_table': 'awards',
            },
        ),
    ]
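Resume.resume_resume_role above is a ManyToManyField routed through the explicit Resume_role model, which is why the migration first creates Resume_role with its two foreign keys and only then adds the many-to-many with through='user.Resume_role'. A rough model-side sketch of that pairing, limited to the fields shown in the migration (all other Resume fields are omitted):

from django.db import models


class Resume_role(models.Model):
    # Explicit join table between Resume and company.Role (db_table 'resume_roles').
    resume = models.ForeignKey('Resume', null=True, on_delete=models.SET_NULL)
    role = models.ForeignKey('company.Role', null=True, on_delete=models.SET_NULL)

    class Meta:
        db_table = 'resume_roles'


class Resume(models.Model):
    # Other Resume fields from the migration are omitted in this sketch.
    resume_resume_role = models.ManyToManyField(
        'company.Role',
        through='Resume_role',
        related_name='resume_resume_role',
    )

    class Meta:
        db_table = 'resumes'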
Exemplo n.º 25
0
class Migration(migrations.Migration):

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name="Recipient",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=200)),
                ("note", models.TextField(default="")),
                ("email", models.EmailField(default="", max_length=254)),
                ("opening", models.CharField(max_length=200)),
                ("postal_adress", models.CharField(max_length=400)),
            ],
        ),
        migrations.CreateModel(
            name="RecipientCollection",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name="Resolution",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("title", models.CharField(max_length=200)),
                ("date_submitted", models.DateTimeField(auto_now_add=True)),
                ("date_enacted", models.DateTimeField(blank=True, null=True)),
                ("date_sent", models.DateField(blank=True, null=True)),
                (
                    "reso_type",
                    django_enumfield.db.fields.EnumField(
                        default=1, enum=resotool.models.ResoType),
                ),
                ("reso_text", models.TextField()),
                ("reso_text_html", models.TextField(default="")),
                ("motivation_text", models.TextField(default="")),
                ("motivation_text_html", models.TextField(default="")),
                ("pdf_path", models.FilePathField(default="")),
            ],
        ),
        migrations.CreateModel(
            name="UserGroup",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name="User",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=200)),
                ("university", models.CharField(default="", max_length=200)),
                ("resolutions",
                 models.ManyToManyField(to="resotool.Resolution")),
                ("user_groups",
                 models.ManyToManyField(to="resotool.UserGroup")),
            ],
        ),
        migrations.CreateModel(
            name="ResolutionEmail",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("email_text", models.TextField()),
                (
                    "status",
                    django_enumfield.db.fields.EnumField(
                        default=4, enum=resotool.models.SendStatus),
                ),
                (
                    "recipient",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="resotool.Recipient",
                    ),
                ),
                (
                    "resolution",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="resotool.Resolution",
                    ),
                ),
            ],
        ),
        migrations.AddField(
            model_name="recipient",
            name="recipient_collection",
            field=models.ManyToManyField(to="resotool.RecipientCollection"),
        ),
        migrations.AddField(
            model_name="recipient",
            name="resolutions",
            field=models.ManyToManyField(to="resotool.Resolution"),
        ),
    ]
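The reso_type and status columns above come from django-enumfield, so the migration stores plain integers (default=1 and default=4) while the model works with enum members. A minimal sketch of how such fields are usually declared; the member names below are invented, only the enum class names (ResoType, SendStatus) and the integer defaults are taken from the migration:

from django.db import models
from django_enumfield import enum


class ResoType(enum.Enum):
    # Hypothetical members; the migration only reveals that the default value is 1.
    STATEMENT = 1
    POSITION = 2


class SendStatus(enum.Enum):
    # Hypothetical member; the migration only reveals that the default value is 4.
    NOT_SENT = 4


class Resolution(models.Model):
    title = models.CharField(max_length=200)
    reso_type = enum.EnumField(ResoType, default=ResoType.STATEMENT)


class ResolutionEmail(models.Model):
    email_text = models.TextField()
    status = enum.EnumField(SendStatus, default=SendStatus.NOT_SENT)
    resolution = models.ForeignKey(Resolution, on_delete=models.CASCADE)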
Exemplo n.º 26
0
class Migration(migrations.Migration):

    dependencies = [
        ('ipsl', '0019_auto_20210112_1730'),
    ]

    operations = [
        migrations.CreateModel(
            name='site_Registration',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('The', models.CharField(max_length=200)),
                ('PI', models.FilePathField(max_length=200)),
                ('sub_I', models.FilePathField(max_length=200)),
            ],
            options={
                'verbose_name': 'Site Registration',
                'verbose_name_plural': 'Site Registration',
            },
        ),
        migrations.AlterModelOptions(
            name='day1',
            options={'verbose_name': 'Day', 'verbose_name_plural': 'Day 1'},
        ),
        migrations.AlterModelOptions(
            name='day2',
            options={'verbose_name': 'Day', 'verbose_name_plural': 'Day 2'},
        ),
        migrations.AlterModelOptions(
            name='day3',
            options={'verbose_name': 'Day', 'verbose_name_plural': 'Day 3'},
        ),
        migrations.AlterModelOptions(
            name='day4',
            options={'verbose_name': 'Day', 'verbose_name_plural': 'Day 4'},
        ),
        migrations.AlterModelOptions(
            name='day5',
            options={'verbose_name': 'Day', 'verbose_name_plural': 'Day 5'},
        ),
        migrations.AlterModelOptions(
            name='day6',
            options={'verbose_name': 'Day', 'verbose_name_plural': 'Day 6'},
        ),
        migrations.AlterModelOptions(
            name='eoiv',
            options={'verbose_name': 'EOVI', 'verbose_name_plural': 'EOVI'},
        ),
        migrations.AlterModelOptions(
            name='eot',
            options={'verbose_name': 'EOT', 'verbose_name_plural': 'EOT'},
        ),
        migrations.AlterModelOptions(
            name='lfu',
            options={'verbose_name': 'LFU', 'verbose_name_plural': 'LFU'},
        ),
        migrations.AlterModelOptions(
            name='protocol',
            options={'verbose_name': 'Protocol', 'verbose_name_plural': 'Protocol'},
        ),
        migrations.AlterModelOptions(
            name='screening',
            options={'verbose_name': 'Screening', 'verbose_name_plural': 'Screening'},
        ),
        migrations.AlterModelOptions(
            name='site',
            options={'verbose_name': 'Site', 'verbose_name_plural': 'Site'},
        ),
        migrations.AlterModelOptions(
            name='sponsor',
            options={'verbose_name': 'Sponsor', 'verbose_name_plural': 'Sponsor'},
        ),
        migrations.AlterModelOptions(
            name='toc',
            options={'verbose_name': 'TOC', 'verbose_name_plural': 'TOC'},
        ),
    ]
Exemplo n.º 27
0
class Migration(migrations.Migration):

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name='Config',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('jmeter_report_path',
                 models.FilePathField(default='', verbose_name='报告存放路径')),
                ('jmeter_path',
                 models.FilePathField(default='', verbose_name='JMeter存放路径')),
                ('jtl_path',
                 models.FilePathField(default='', verbose_name='Jtl文件存放路径')),
                ('gmt_create',
                 models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('gmt_modified',
                 models.DateTimeField(auto_now=True, verbose_name='修改时间')),
            ],
            options={
                'db_table': 'jmeter_config',
                'ordering': ['-gmt_modified'],
                'default_permissions': ('add', 'change'),
            },
        ),
        migrations.CreateModel(
            name='Files',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name',
                 models.CharField(default='',
                                  max_length=50,
                                  unique=True,
                                  verbose_name='文件名')),
                ('status', models.BooleanField(default=True)),
                ('file_path', models.FilePathField(verbose_name='文件')),
            ],
        ),
        migrations.CreateModel(
            name='Host',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('ip',
                 models.GenericIPAddressField(default='127.0.0.1',
                                              verbose_name='IP地址')),
                ('server', models.URLField(default='', verbose_name='域名')),
                ('gmt_create',
                 models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('gmt_modified',
                 models.DateTimeField(auto_now=True, verbose_name='修改时间')),
            ],
            options={
                'db_table': 'jmeter_host',
                'ordering': ['-gmt_modified'],
                'default_permissions': ('add', 'change'),
            },
        ),
        migrations.CreateModel(
            name='Machine',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name',
                 models.CharField(default='',
                                  max_length=20,
                                  verbose_name='机器名')),
                ('port',
                 models.PositiveIntegerField(default=22, verbose_name='机器端口')),
                ('ip',
                 models.GenericIPAddressField(default='127.0.0.1',
                                              verbose_name='IP地址')),
                ('password',
                 models.CharField(default='',
                                  max_length=50,
                                  verbose_name='password')),
                ('status',
                 models.BooleanField(default=False, verbose_name='状态, 离线/在线')),
                ('is_slave',
                 models.BooleanField(default=False,
                                     verbose_name='是否是从机器, 只允许一个主机器')),
                ('gmt_create',
                 models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('gmt_modified',
                 models.DateTimeField(auto_now=True, verbose_name='修改时间')),
            ],
            options={
                'db_table': 'jmeter_machine',
                'ordering': ['-gmt_modified'],
                'default_permissions': ('add', 'change'),
            },
        ),
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name',
                 models.CharField(default='',
                                  max_length=20,
                                  verbose_name='任务名')),
                ('run_time', models.DateTimeField(verbose_name='执行时间')),
                ('loops',
                 models.SmallIntegerField(default=1, verbose_name='循环次数')),
                ('num_threads',
                 models.PositiveIntegerField(default=1, verbose_name='线程数')),
                ('scheduler',
                 models.BooleanField(default=False, verbose_name='调度器')),
                ('duration',
                 models.PositiveIntegerField(default=0, verbose_name='持续时间')),
                ('status',
                 models.BooleanField(default=True, verbose_name='任务状态')),
                ('jmx_file', models.FilePathField(default='')),
                ('task_start_time',
                 models.DateTimeField(default='1970-01-01T00:00',
                                      verbose_name='任务开始时间')),
                ('task_end_time',
                 models.DateTimeField(default='1970-01-01T00:00',
                                      verbose_name='任务结束时间')),
                ('gmt_create',
                 models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('gmt_modified',
                 models.DateTimeField(auto_now=True, verbose_name='修改时间')),
            ],
            options={
                'db_table': 'jmeter_task',
                'ordering': ['-gmt_modified'],
                'permissions': (('can_run_task', '执行性能测试任务'), ),
                'default_permissions': ('add', 'change'),
            },
        ),
        migrations.CreateModel(
            name='TaskResult',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name',
                 models.CharField(default='',
                                  max_length=20,
                                  verbose_name='任务名')),
                ('run_time', models.DateTimeField(verbose_name='执行时间')),
                ('loops',
                 models.SmallIntegerField(default=1, verbose_name='循环次数')),
                ('num_threads',
                 models.PositiveIntegerField(default=1, verbose_name='线程数')),
                ('scheduler',
                 models.BooleanField(default=False, verbose_name='调度器')),
                ('duration',
                 models.PositiveIntegerField(default=0, verbose_name='持续时间')),
                ('jmx_file', models.FilePathField(default='')),
                ('data_files_id', models.CharField(default='',
                                                   max_length=100)),
                ('status', models.BooleanField(verbose_name='状态')),
                ('machines_id', models.CharField(default='', max_length=100)),
                ('gmt_create', models.DateTimeField(auto_now_add=True)),
                ('gmt_modified', models.DateTimeField(auto_now=True)),
            ],
            options={
                'db_table': 'jmeter_task_result',
                'ordering': ['-gmt_modified'],
                'default_permissions': ('add', 'change'),
            },
        ),
        migrations.AddField(
            model_name='machine',
            name='task',
            field=models.ForeignKey(
                db_constraint=False,
                default='',
                on_delete=django.db.models.deletion.CASCADE,
                related_name='machines',
                to='jmeter.Task',
                verbose_name='任务'),
        ),
        migrations.AddField(
            model_name='files',
            name='task_data_file',
            field=models.ForeignKey(
                db_constraint=False,
                default='',
                on_delete=django.db.models.deletion.CASCADE,
                related_name='task_data_file',
                to='jmeter.Task'),
        ),
    ]
Exemplo n.º 28
0
class Payinfo(models.Model):
    title = models.CharField(max_length=100)
    profile = models.CharField(max_length=200)
    price = models.FloatField()
    # Where this document sits within our project, i.e. the path where the file is stored
    path = models.FilePathField()
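As the comment notes, path only records where a document already lives on disk: FilePathField offers a choice among existing files rather than handling uploads, so it is usually given an explicit directory and a filename pattern. A small hedged variant of the field (the directory and the .pdf pattern are illustrative assumptions, not part of the original model):

from django.db import models


class PayinfoDocument(models.Model):
    # Hypothetical refinement of Payinfo.path: only offer PDF files that already
    # exist under an assumed documents directory, including subdirectories.
    path = models.FilePathField(
        path='/srv/payinfo/docs',   # assumed storage directory
        match=r'.*\.pdf$',          # filename pattern applied to the choices
        recursive=True,
        max_length=255,
    )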
Exemplo n.º 29
0
class Snippet(Content):
    """
    A snippet is a small piece of an audio recording. Usually containing one question and an answer for this question.
    Snippets are created from AudioSources uploaded by fans.
    """
    class Meta:
        ordering = ('source', 'beginning')
        verbose_name = _('snippet')
        verbose_name_plural = _('snippets')

    source = models.ForeignKey(AudioSource,
                               related_name='snippets',
                               on_delete=models.CASCADE,
                               db_index=True)
    beginning = models.PositiveIntegerField(default=0)  # In seconds
    length = models.PositiveIntegerField(default=0)  # In seconds
    file = models.FilePathField(path=get_snippet_path,
                                recursive=True,
                                match='*.mp3',
                                blank=True,
                                null=True)
    entry = models.ForeignKey(Entry,
                              related_name='snippets',
                              null=True,
                              blank=True,
                              on_delete=models.SET_NULL)
    comment = models.CharField(max_length=500, blank=True)
    muted = models.BooleanField(
        default=False, help_text=_("Is given part of the audio muted?"))
    optional = models.BooleanField(
        default=False, help_text=_("This snippet shouldn't be transcribed."))

    def __str__(self):
        return "<Snippet({}/{}): {}-{}>".format(self.source_id, self.id,
                                                self.beginning, self.ending)

    def get_ending(self):
        return self.ending

    @property
    def ending(self):
        return int(self.beginning) + self.length

    @ending.setter
    def ending(self, value):
        new_length = int(value) - int(self.beginning)
        if new_length <= 0:
            raise ValueError("Snippet ending cannot be before beginning.")
        self.length = new_length

    def delete(self, *args, **kwargs):
        try:
            os.unlink(str(self.file))
        except FileNotFoundError:
            pass
        super(Snippet, self).delete(*args, **kwargs)

    def editable(self):
        request = get_request()
        if request.user.is_staff or request.user == self.created_by:
            return True
        return False

    @property
    def position_percent(self):
        return self.beginning / self.source.length * 100

    def start_time(self) -> str:
        """
        Returns starting time formatted like this: 00:00:00.
        """
        seconds = int(self.beginning) % 60
        minutes = (int(self.beginning) // 60) % 60
        hours = int(self.beginning) // 60 // 60

        if hours:
            return "{:0>#2d}:{:0>#2d}:{:0>#2d}".format(hours, minutes, seconds)
        else:
            return "{:0>#2d}:{:0>#2d}".format(minutes, seconds)

    def update_file(self):
        from palanaeum import tasks
        if not self.muted:
            tasks.create_snippet.delay(self.id)

    def get_file_url(self):
        if not self.file:
            return False
        return '/' + self.file.replace('\\', '/')
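The ending property pair above derives length from a requested end point, and start_time renders beginning as HH:MM:SS, or MM:SS when the snippet starts less than an hour in. A quick illustration of those helpers on an unsaved instance, for example in a Django shell (the numbers are made up, and the required source/Content fields are left unset since nothing touches the database):

# Illustrative only: exercise the ending setter and the time formatting helpers.
snippet = Snippet(beginning=3700, length=0)   # 1 h 1 min 40 s into the recording
snippet.ending = 3760                         # setter recomputes length = 3760 - 3700
assert snippet.length == 60
assert snippet.get_ending() == 3760           # ending property: beginning + length
assert snippet.start_time() == "01:01:40"     # hours present, so HH:MM:SS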
Exemplo n.º 30
0
class profile(models.Model):
    # name= models.CharField(max_length=100)
    # user_name = models.Char
    description = models.TextField()
    technology = models.CharField(max_length=20)
    image = models.FilePathField(path="/img")