Example #1
def deleteStory(data):

    if not current_user.is_mod():
        return getResponse(error=True,
                           message="I see what you (tried) to do there!")
    assert 'item-id' in data
    assert 'mode' in data

    delete_id = data["item-id"]
    clean_item = Story.query.filter(Story.id == delete_id).one()

    # !Ordering here matters!
    # Change-tables have to go second.
    delete_from = [
        Author,
        AuthorChanges,
        Tags,
        TagsChanges,
        Genres,
        GenresChanges,
        # Story,
        # StoryChanges,
    ]

    for clearTable in delete_from:
        clearTable.query.filter(clearTable.series == clean_item.id).delete()

    Watches.query.filter(Watches.series_id == clean_item.id).delete()
    Story.query.filter(Story.id == clean_item.id).delete()
    StoryChanges.query.filter(StoryChanges.srccol == clean_item.id).delete()
    # db.session.delete(clean_item)
    db.session.commit()

    return getResponse("Story was deleted entirely!", error=False)
Example #2
def dispatchApiCall(reqJson):
	print("Json request:", reqJson)
	if not "mode" in reqJson:
		print("API JSON Request without mode!")
		return getResponse("No mode in API Request!", error=True)

	mode = reqJson["mode"]
	if not mode in DISPATCH_TABLE:
		print("Invalid mode in request: '{mode}'".format(mode=mode))
		return getResponse("Invalid mode in API Request ({mode})!".format(mode=mode), error=True)

	dispatch_method, auth_required, csrf_required = DISPATCH_TABLE[mode]
	try:
		if csrf_required:
			csrf.protect()

		if auth_required and not current_user.is_authenticated():
			return getResponse(LOGIN_REQ, error=True)

		else:
			ret = dispatch_method(reqJson)

	except AssertionError as e:
		traceback.print_exc()
		print(reqJson)
		return getResponse("Error processing API request: '%s'!" % e, error=True)



	return ret
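
For context, DISPATCH_TABLE itself never appears in these examples. A minimal sketch of its assumed shape, matching the three-element unpack above (a later variant, Example #15, adds a fourth rate_limited flag); the handler names below are invented placeholders:

# Hedged sketch only: maps API "mode" strings to (handler, auth_required, csrf_required).
def example_get_series(reqJson):        # placeholder handler, not from the original project
    return {"error": False, "message": "ok", "data": reqJson}

def example_set_progress(reqJson):      # placeholder handler, not from the original project
    return {"error": False, "message": "progress saved"}

DISPATCH_TABLE = {
    "example-get-series":   (example_get_series,   False, False),
    "example-set-progress": (example_set_progress, True,  True),
}
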
Example #3
def flatten_series_by_url(data):
	if not current_user.is_mod():
		return getResponse(error=True, message="You have to have moderator privileges to do that!")

	dups = db.engine.execute('''
		SELECT
			website, COUNT(*) AS dupes
		FROM
			series
		WHERE
			website IS NOT NULL AND website != ''
		GROUP
			BY website
		HAVING
			(COUNT(*) > 1);''')
	dups = list(dups)

	match_num = 0
	for website, number in dups:
		print(website, number)
		matches = Series.query.filter(Series.website==website).all()
		ids = [match.id for match in matches]
		zipped = list(zip(ids, ids[1:]))
		for m1, m2 in zipped:
			match_num += 1
			merge_series_ids(m1, m2)

	return getResponse("%s Items merged." % match_num, error=False)
Example #4
def deleteGroupAutoReleases(data):

	if not current_user.is_mod():
		return getResponse(error=True, message="I see what you (tried) to do there!")

	assert 'item-id' in data
	assert 'mode' in data
	assert data['mode'] == "delete-auto-from-group"

	try:
		delete_id = int(data["item-id"])
	except ValueError:
		raise AssertionError("Failure converting item ID to integer!")


	clean_item = Translators.query.filter(Translators.id==delete_id).one()

	print(clean_item)
	for release in clean_item.releases:
		if release.changeuser == FeedFeeder.FeedFeeder.RSS_USER_ID:
			db.session.delete(release)
			# print(release.id, release.volume, release.chapter, release.postfix, release.changeuser)
		else:
			print("Not deleting: ", release.id, release.volume, release.chapter, release.postfix, release.changeuser)

	db.session.commit()

	return getResponse("Autogen releases deleted. Reloading.", error=False)
Example #5
def flatten_series_by_url(data, admin_override=False):
    if admin_override is False and (not current_user.is_mod()):
        return getResponse(
            error=True,
            message="You have to have moderator privileges to do that!")

    dups = db.engine.execute('''
		SELECT
			website, COUNT(*) AS dupes
		FROM
			series
		WHERE
			website IS NOT NULL AND website != ''
		GROUP
			BY website
		HAVING
			(COUNT(*) > 1);''')
    dups = list(dups)

    match_num = 0
    for website, number in dups:
        if not "royalroadl" in website.lower():
            continue

        matches = Series.query.filter(Series.website == website).all()
        ids = [match.id for match in matches]
        zipped = list(zip(ids, ids[1:]))
        for m1, m2 in zipped:
            match_num += 1
            merge_series_ids(m1, m2)

    return getResponse("%s Items merged." % match_num, error=False)
Example #6
def deleteStory(data):

	if not current_user.is_mod():
		return getResponse(error=True, message="I see what you (tried) to do there!")
	assert 'item-id' in data
	assert 'mode' in data

	delete_id = data["item-id"]
	clean_item = Story.query.filter(Story.id==delete_id).one()


	# !Ordering here matters!
	# Change-tables have to go second.
	delete_from = [
			Author,
			AuthorChanges,
			Tags,
			TagsChanges,
			Genres,
			GenresChanges,
			# Story,
			# StoryChanges,
		]


	for clearTable in delete_from:
		clearTable.query.filter(clearTable.series==clean_item.id).delete()

	Watches.query.filter(Watches.series_id==clean_item.id).delete()
	Story.query.filter(Story.id==clean_item.id).delete()
	StoryChanges.query.filter(StoryChanges.srccol==clean_item.id).delete()
	# db.session.delete(clean_item)
	db.session.commit()

	return getResponse("Story was deleted entirely!", error=False)
Example #7
def dispatchApiCall(reqJson):
    print("Json request:", reqJson)
    if not "mode" in reqJson:
        print("API JSON Request without mode!")
        return getResponse("No mode in API Request!", error=True)

    mode = reqJson["mode"]
    if not mode in DISPATCH_TABLE:
        print("Invalid mode in request: '{mode}'".format(mode=mode))
        return getResponse(
            "Invalid mode in API Request ({mode})!".format(mode=mode),
            error=True)

    dispatch_method, auth_required, csrf_required = DISPATCH_TABLE[mode]
    try:
        if csrf_required:
            csrf.protect()

        if auth_required and not current_user.is_authenticated():
            return getResponse(LOGIN_REQ, error=True)

        else:
            ret = dispatch_method(reqJson)

    except AssertionError as e:
        traceback.print_exc()
        print(reqJson)
        return getResponse("Error processing API request: '%s'!" % e,
                           error=True)

    return ret
Example #8
def bulkToggleVolumeCountedStatus(data):

    if not current_user.is_mod():
        return getResponse(error=True,
                           message="I see what you (tried) to do there!")

    assert 'item-id' in data
    assert 'mode' in data
    assert 'enable' in data
    assert data['mode'] == "toggle-volume-releases"

    try:
        series_id = int(data["item-id"])
    except ValueError:
        raise AssertionError("Failure converting item ID to integer!")

    enable = data['enable']
    assert enable in ['Include', 'Exclude']

    should_include = enable == 'Include'

    item_row = Series.query.filter(Series.id == series_id).one()

    print(item_row)
    for release in item_row.releases:
        if release.changeuser == FeedFeeder.FeedFeeder.RSS_USER_ID or release.changeuser == FeedFeeder.FeedFeeder.NU_SRC_USER_ID:
            if release.volume:
                release.include = should_include

    app.utilities.update_latest_row(item_row)

    db.session.commit()

    return getResponse("Autogen releases deleted. Reloading.", error=False)
Example #9
def fix_escaped_quotes(dummy_data, admin_override=False):
	if admin_override is False and (not current_user.is_mod()):
		return getResponse(error=True, message="You have to have moderator privileges to do that!")

	# SELECT * FROM series WHERE title LIKE E'%\\\'%';
	bad_title = 0
	bad_desc = 0


	q = Story.query.filter(or_(Story.title.like(r"%'%"), Story.title.like(r"%’%"), Story.title.like(r"%‘%"), Story.title.like(r"%“%"), Story.title.like(r"%”%")))
	items = q.all()
	print("Name fixing processing query resulted in %s items" % len(items))
	for item in items:
		old = item.title
		new = old
		while any([r"\"" in new, r"\'" in new, "’" in new, "‘" in new, "“" in new, "”" in new]):
			new = new.replace(r"\'", "'")
			new = new.replace(r'\"', '"')
			new = new.replace(r"’", "'")
			new = new.replace(r"‘", "'")
			new = new.replace(r"“", '"')
			new = new.replace(r"”", '"')

		have = Story.query.filter(Story.title == new).scalar()
		if old != new:
			if have:
				print("Duplicate item!", (old, new), old==new)
				merge_series_ids(have.id, item.id)
			else:
				print("Fixing title.")
				item.title = new
				db.session.commit()
			bad_title += 1


	# F**K ALL SMART QUOTE BULLSHITS EVER
	q = Story.query.filter(or_(Story.description.like(r"%'%"), Story.description.like(r"%’%"), Story.description.like(r"%‘%"), Story.description.like(r"%“%"), Story.description.like(r"%”%")))

	items = q.all()
	print("Series description processing query resulted in %s items" % len(items))
	for item in items:
		old = item.description
		new = old

		while any([r"\"" in new, r"\'" in new, "’" in new, "‘" in new, "“" in new, "”" in new]):
			new = new.replace(r"\'", "'")
			new = new.replace(r'\"', '"')
			new = new.replace(r"’", "'")
			new = new.replace(r"‘", "'")
			new = new.replace(r"“", '"')
			new = new.replace(r"”", '"')
		if old != new:
			print("Fixing description smart-quotes and over-escapes for series: %s" % item.id)
			item.description = new
			db.session.commit()
			bad_desc += 1

	print("Update complete.")

	return getResponse("%s main titles, %s descriptions required fixing. %s" % (bad_title, bad_desc, conflicts), error=False)
Example #10
def deleteGroupAutoReleases(data):

    if not current_user.is_mod():
        return getResponse(error=True,
                           message="I see what you (tried) to do there!")

    assert 'item-id' in data
    assert 'mode' in data
    assert data['mode'] == "delete-auto-from-group"

    try:
        delete_id = int(data["item-id"])
    except ValueError:
        raise AssertionError("Failure converting item ID to integer!")

    clean_item = Translators.query.filter(Translators.id == delete_id).one()

    print(clean_item)
    for release in clean_item.releases:
        if release.changeuser == FeedFeeder.FeedFeeder.RSS_USER_ID:
            db.session.delete(release)
            # print(release.id, release.volume, release.chapter, release.postfix, release.changeuser)
        else:
            print("Not deleting: ", release.id, release.volume,
                  release.chapter, release.postfix, release.changeuser)

    db.session.commit()

    return getResponse("Autogen releases deleted. Reloading.", error=False)
Example #11
def bulkToggleVolumeCountedStatus(data):

	if not current_user.is_mod():
		return getResponse(error=True, message="I see what you (tried) to do there!")

	assert 'item-id' in data
	assert 'mode'    in data
	assert 'enable'  in data
	assert data['mode'] == "toggle-volume-releases"

	try:
		series_id = int(data["item-id"])
	except ValueError:
		raise AssertionError("Failure converting item ID to integer!")

	enable = data['enable']
	assert enable in ['Include', 'Exclude']

	should_include = enable == 'Include'


	item_row = Series.query.filter(Series.id==series_id).one()

	print(item_row)
	for release in item_row.releases:
		if release.changeuser == FeedFeeder.FeedFeeder.RSS_USER_ID or release.changeuser == FeedFeeder.FeedFeeder.NU_SRC_USER_ID:
			if release.volume:
				release.include = should_include


	app.utilities.update_latest_row(item_row)

	db.session.commit()

	return getResponse("Autogen releases deleted. Reloading.", error=False)
Example #12
def delete_duplicate_releases(data, admin_override=False):
    if admin_override is False and (not current_user.is_mod()):
        return getResponse(
            error=True,
            message="You have to have moderator privileges to do that!")

    dups = db.engine.execute('''
		SELECT
			srcurl, COUNT(*) AS dupes
		FROM
			releases
		WHERE
			srcurl IS NOT NULL AND srcurl != ''
		GROUP
			BY srcurl
		HAVING
			(COUNT(*) > 1);''')
    dups = list(dups)

    match_num = 0
    mismatches = set()
    for website, number in dups:
        # print(website, number)
        matches = Releases.query.filter(Releases.srcurl == website).all()
        zipped = list(zip(matches, matches[1:]))
        for m1, m2 in zipped:
            if m1.series != m2.series:
                tup = (m1.series, m2.series)
                if tup not in mismatches:
                    print("Mismatch: ", m1.series, m2.series, m1.srcurl,
                          m2.srcurl)
                    mismatches.add(tup)
            else:
                match_num += 1
                print(m1.series, m2.series)

                # ~~~Sort by change-time, since we care more about~~~
                # ~~~the latest change (since it'll probably be more accurate)~~~
                # Edit: Now picks the older version, since untimed duplicates keep
                # cropping up from japtem.
                if m1.changetime < m2.changetime:
                    older = m1
                    newer = m2
                else:
                    older = m2
                    newer = m1
                if not older.include:
                    # If the to-be-removed chapter has been explicitly excluded from
                    # the chapter count, don't do the merge.
                    pass
                else:
                    db.session.delete(newer)
                    db.session.commit()

    # print(dups)
    # print(list(dups))

    return getResponse("%s Items merged." % match_num, error=False)
Example #13
def delete_duplicate_releases(data, admin_override=False):
	if admin_override is False and (not current_user.is_mod()):
		return getResponse(error=True, message="You have to have moderator privileges to do that!")

	dups = db.engine.execute('''
		SELECT
			srcurl, COUNT(*) AS dupes
		FROM
			releases
		WHERE
			srcurl IS NOT NULL AND srcurl != ''
		GROUP
			BY srcurl
		HAVING
			(COUNT(*) > 1);''')
	dups = list(dups)

	match_num = 0
	mismatches = set()
	for website, number in dups:
		# print(website, number)
		matches = Releases.query.filter(Releases.srcurl==website).all()
		zipped = list(zip(matches, matches[1:]))
		for m1, m2 in zipped:
			if m1.series != m2.series:
				tup = (m1.series, m2.series)
				if tup not in mismatches:
					print("Mismatch: ", m1.series, m2.series, m1.srcurl, m2.srcurl)
					mismatches.add(tup)
			else:
				match_num += 1
				print(m1.series, m2.series)

				# ~~~Sort by change-time, since we care more about~~~
				# ~~~the latest change (since it'll probably be more accurate)~~~
				# Edit: Now picks the older version, since untimed duplicates keep
				# cropping up from japtem.
				if m1.changetime < m2.changetime:
					older = m1
					newer = m2
				else:
					older = m2
					newer = m1
				if not older.include:
					# If the to-be-removed chapter has been explicitly excluded from
					# the chapter count, don't do the merge.
					pass
				else:
					db.session.delete(newer)
					db.session.commit()

	# print(dups)
	# print(list(dups))

	return getResponse("%s Items merged." % match_num, error=False)
Example #14
def delete_duplicate_releases(data):
	if not current_user.is_mod():
		return getResponse(error=True, message="You have to have moderator privileges to do that!")

	dups = db.engine.execute('''
		SELECT
			srcurl, COUNT(*) AS dupes
		FROM
			releases
		WHERE
			srcurl IS NOT NULL AND srcurl != ''
		GROUP
			BY srcurl
		HAVING
			(COUNT(*) > 1);''')
	dups = list(dups)

	match_num = 0
	mismatches = set()
	for website, number in dups:
		# print(website, number)
		matches = Releases.query.filter(Releases.srcurl==website).all()
		zipped = list(zip(matches, matches[1:]))
		for m1, m2 in zipped:
			if m1.series != m2.series:
				tup = (m1.series, m2.series)
				if tup not in mismatches:
					print("Mismatch!")
					print(m1.series, m2.series)
					mismatches.add(tup)
			else:
				match_num += 1
				# print(m1.series, m2.series)

				# Sort by change-time, since we care more about
				# the latest change (since it'll probably be more accurate)
				if m1.changetime < m2.changetime:
					older = m1
					newer = m2
				else:
					older = m2
					newer = m1

				db.session.delete(older)
				db.session.commit()

	# print(dups)
	# print(list(dups))

	return getResponse("%s Items merged." % match_num, error=False)
Example #15
def dispatchApiCall(reqJson):

	forwarded_for = request.headers.get('X-Forwarded-For', None)

	# if forwarded_for == '108.28.56.67':
	# 	print("Bouncing possible abuse from %s" % (forwarded_for, ))
	# 	return getResponse("Hi there! Please contact me on github.com/fake-name/wlnupdates before doing bulk scraping, please!", error=True)

	if not "mode" in reqJson:
		print("API JSON Request without mode!")
		return getResponse("No mode in API Request!", error=True)

	mode = reqJson["mode"]
	if not mode in DISPATCH_TABLE:
		print("Invalid mode in request: '{mode}'".format(mode=mode))
		return getResponse("Invalid mode in API Request ({mode})!".format(mode=mode), error=True)

	dispatch_method, auth_required, csrf_required, rate_limited = DISPATCH_TABLE[mode]
	try:
		if csrf_required:
			csrf.protect()

		if auth_required and not current_user.is_authenticated():
			return getResponse(LOGIN_REQ, error=True)

		if rate_limited and not current_user.is_authenticated():
			limiter_key = forwarded_for + " " + mode
			if limiter_key in RATE_LIMITER:
				print("Anon User hit rate limiting. Bouncing.")
				return getResponse("API calls when not logged in are rate limited. Please either log in, or slow down. "
					"Complain at github.com/fake-name/wlnupdates/issues if this is a problem", error=True)

			print("Inserting anon requester into rate-limit cache.")
			RATE_LIMITER[limiter_key] = True

			ret = dispatch_method(reqJson)

		else:
			ret = dispatch_method(reqJson)

	except AssertionError as e:
		traceback.print_exc()
		print(reqJson)
		return getResponse("Error processing API request: '%s'!" % e, error=True)



	return ret
Example #16
def delete_bad_tags(dummy_data, admin_override=False):
	if admin_override is False and (not current_user.is_mod()):
		return getResponse(error=True, message="You have to have moderator privileges to do that!")

	conf = get_config_json()
	print("Bad tags:", conf['delete-tags'])
	removed = 0
	for bad_tag in conf['delete-tags']:
		items = Tags.query.filter(Tags.tag == bad_tag).count()
		print("Found %s instances of tag: %s" % (items, bad_tag))
		removed += 1
		Tags.query.filter(Tags.tag == bad_tag).delete()

	save_config_json(conf)
	return getResponse("Found %s tags that required patching." % (removed), error=False)
Example #17
def addNewCover(series, updateDat):
    assert 'name' in updateDat
    assert 'file' in updateDat
    assert 'type' in updateDat

    data = DataURI(updateDat['file'])

    dathash = getHash(data.data).lower()
    have = Covers.query.filter(Covers.hash == dathash).scalar()
    if have:
        return getResponse(
            "A cover with that MD5 hash already exists! Are you accidentally adding a duplicate?",
            True)

    covpath = saveCoverFile(data.data, updateDat['name'])

    new = Covers(
        srcfname=updateDat['name'],
        series=series.id,
        description='',
        fspath=covpath,
        hash=dathash,
    )

    db.session.add(new)
    db.session.commit()
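
addNewCover hands the uploaded 'file' field to a DataURI helper (from a data-URI parsing library; its definition is not shown in these examples) and hashes the decoded bytes to detect duplicate covers. A rough stdlib-only sketch of what that decoding step amounts to:

import base64

def decode_data_uri(uri):
    # Rough stand-in for DataURI(...).data: split the header from the payload
    # and base64-decode it. Real data URIs have more variants than this.
    header, _, payload = uri.partition(",")
    if ";base64" in header:
        return base64.b64decode(payload)
    return payload.encode("utf-8")

raw = decode_data_uri("data:image/png;base64," + base64.b64encode(b"\x89PNG fake").decode())
print(len(raw), "bytes")   # these bytes are what getHash() is applied to
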
Example #18
def toggle_counted(data):
	release = getReleaseFromId(data['id'])
	release.include = not release.include
	db.session.commit()

	flash(gettext('Release %(id)s count-state toggled. New state: %(state)s', id=release.id, state="counted" if release.include else "uncounted"))
	return getResponse("Item count-state toggled!", error=False)
Example #19
def setReadingProgressJson(data):
	sid, progress = validateReadingProgressData(data)

	watch_row = Watches.query.filter(
			(Watches.user_id==getCurrentUserId()) &
			(Watches.series_id==sid)
		).one()

	vol, chp, frag = progress

	if chp == 0 and vol == 0:
		vol = -1
		chp = -1

	if vol == 0:
		vol = None

	if frag == 0:
		frag = None

	watch_row.volume   = vol
	watch_row.chapter  = chp
	watch_row.fragment = frag
	db.session.commit()

	return getResponse('Succeeded')
Example #20
def setReadingProgressJson(data):
	sid, progress = validateReadingProgressData(data)

	watch_row = Watches.query.filter(
			(Watches.user_id==getCurrentUserId()) &
			(Watches.series_id==sid)
		).one()

	vol, chp = progress

	if chp == 0 and vol == 0:
		vol = -1
		chp = -1

	if vol == 0:
		vol = -1

	watch_row.volume  = vol
	watch_row.chapter = chp
	db.session.commit()

	# sid = validated['id']
	# group = Translators.query.filter(Translators.id==sid).one()

	# for entry in validated['entries']:
	# 	print(entry)

	# 	if entry['type'] == 'alternate-names':
	# 		updateGroupAltNames(group, entry['data'])
	# 	else:
	# 		raise AssertionError("Unknown modification type!")

	return getResponse('Succeeded')
Example #21
def updateTitle(series, newTitle_raw):

    newTitle = bleach.clean(newTitle_raw.strip(), tags=[], strip=True)

    # Short circuit if nothing has changed.
    if newTitle == series.title:
        return

    conflict_series = Series.query.filter(Series.title == newTitle).scalar()

    if conflict_series and conflict_series.id != series.id:
        return getResponse(
            "A series with that name already exists! Please choose another name",
            error=True)

    oldTitle = series.title
    series.title = newTitle
    series.changeuser = getCurrentUserId()
    series.changetime = datetime.datetime.now()

    ret = updateAltNames(series, [newTitle, oldTitle], deleteother=False)
    if ret:
        return ret

    return None
Example #22
def merge_tl_group_ids(m1, m2):
    merge_from = max(m1, m2)
    merge_to = min(m1, m2)

    itm_from = Translators.query.filter(Translators.id == merge_from).one()
    itm_to = Translators.query.filter(Translators.id == merge_to).one()

    print(itm_from)
    print(itm_to)

    Releases.query.filter(Releases.tlgroup == merge_from).update(
        {'tlgroup': merge_to})
    ReleasesChanges.query.filter(ReleasesChanges.tlgroup == merge_from).update(
        {'tlgroup': merge_to})

    app.api_handlers.updateGroupAltNames(
        itm_to, [itm.name for itm in itm_from.alt_names], delete=False)

    AlternateTranslatorNamesChanges.query                   \
     .filter(AlternateTranslatorNamesChanges.group==itm_from.id) \
     .delete(synchronize_session="fetch")

    db.session.delete(itm_from)
    db.session.commit()

    return getResponse("Success", False)
Example #23
def deleteGroup(data):
	return getResponse("Not implemented yet!", error=True)

	# if not current_user.is_mod():
	# 	return getResponse(error=True, message="I see what you (tried) to do there!")
	# assert 'item-id' in data
	# assert 'mode' in data

	# delete_id = data["item-id"]
	# clean_item = Series.query.filter(Series.id==delete_id).one()


	# # !Ordering here matters!
	# # Change-tables have to go second.
	# delete_from = [
	# 		AlternateNames,
	# 		AlternateNamesChanges,
	# 		Author,
	# 		AuthorChanges,
	# 		Illustrators,
	# 		IllustratorsChanges,
	# 		Tags,
	# 		TagsChanges,
	# 		Genres,
	# 		GenresChanges,
	# 		Publishers,
	# 		PublishersChanges,
	# 		Covers,
	# 		CoversChanges,
	# 		Releases,
	# 		ReleasesChanges,
	# 		# Series,
	# 		# SeriesChanges,
	# 	]


	# for clearTable in delete_from:
	# 	clearTable.query.filter(clearTable.series==clean_item.id).delete()

	# Watches.query.filter(Watches.series_id==clean_item.id).delete()
	# Series.query.filter(Series.id==clean_item.id).delete()
	# SeriesChanges.query.filter(SeriesChanges.srccol==clean_item.id).delete()
	# # db.session.delete(clean_item)
	# db.session.commit()

	return getResponse("Series was deleted entirely!", error=False)
Example #24
def clean_tags(dummy_data, admin_override=False):
    if admin_override is False and (not current_user.is_mod()):
        return getResponse(
            error=True,
            message="You have to have moderator privileges to do that!")
    bad_tags = 0

    bad_tags = db.session.execute('''
		SELECT
			COUNT(*)
		FROM
			tags
		WHERE
			tag IN (
			SELECT tag
			FROM (
				SELECT tag
				FROM tags
				GROUP BY tag
				HAVING COUNT(*) = 1
			) AS ONLY_ONCE
			)
		''')

    bad_tags = list(bad_tags)

    db.session.execute('''
		DELETE
		FROM
			tags
		WHERE
			tag IN (
			SELECT tag
			FROM (
				SELECT tag
				FROM tags
				GROUP BY tag
				HAVING COUNT(*) = 1
			) AS ONLY_ONCE
			)
		;
		''')
    db.session.commit()

    return getResponse("Found %s tags that required patching." % (bad_tags),
                       error=False)
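
One caveat worth noting: db.session.execute() yields row tuples, so bad_tags ends up as something like [(42,)] and the response message embeds that tuple rather than a bare count. If a plain number is wanted, the scalar has to be unwrapped first, for example:

# Hedged sketch: unwrap the single COUNT(*) row before formatting the message.
rows = [(42,)]                          # shape of list(db.session.execute(...)) for a COUNT(*) query
bad_tag_count = rows[0][0] if rows else 0
print("Found %s tags that required patching." % bad_tag_count)
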
Example #25
def get_author_id(data):
	assert "id" in data, "You must specify a id to query for."
	assert is_integer(data['id']), "The 'id' member must be an integer, or a string that can cleanly cast to one."
	a_id = int(data['id'])
	author, series = item_view_items.get_author(a_id)
	if not author:
		return getResponse(error=True, message='No item found for that ID!')
	data = unpack_artist_or_illustrator(author, series)
	return getDataResponse(data)
Example #26
def processReleaseUpdateJson(data):

    # Json request:
    # {
    #     'old-info': {
    #         'releasetime' : '2016/08/19 05:23',
    #         'release_pg'  : 'http://www.translationnations.com/2016/08/19/the-ultimate-evolution-volume-2-chapter-6/',
    #         'volume'      : 2,
    #         'postfix'     : '',
    #         'chapter'     : 6,
    #         'subChap'     : '',
    #         'counted'     : True
    #     },
    #     'mode': 'release-update',
    #     'id': 1183431,
    #     'new-info': {
    #         'releasetime' : '2016/08/19 05:23',
    #         'release_pg'  : 'http://www.translationnations.com/2016/08/19/the-ultimate-evolution-volume-2-chapter-6/',
    #         'volume'      : '2',
    #         'postfix'     : '',
    #         'chapter'     : '6',
    #         'subChap'     : '',
    #         'counted'     : False
    #     }
    # }

    assert 'mode' in data
    assert 'release-id' in data
    assert 'old-info' in data
    assert 'new-info' in data
    for subsect in [data['old-info'], data['new-info']]:
        assert 'releasetime' in subsect
        assert 'release_pg' in subsect
        assert 'volume' in subsect
        assert 'postfix' in subsect
        assert 'chapter' in subsect
        assert 'subChap' in subsect
        assert 'counted' in subsect

    data['old-info']['releasetime'] = dateutil.parser.parse(
        data['old-info']['releasetime'])
    data['new-info']['releasetime'] = dateutil.parser.parse(
        data['new-info']['releasetime'])
    if data['new-info']['releasetime'] > datetime.datetime.now():
        data['new-info']['releasetime'] = datetime.datetime.now()

    data['old-info']['volume'] = data['old-info']['volume'] if data[
        'old-info']['volume'] else 0.0
    data['old-info']['chapter'] = data['old-info']['chapter'] if data[
        'old-info']['chapter'] else 0.0
    data['old-info']['subChap'] = data['old-info']['subChap'] if data[
        'old-info']['subChap'] else 0.0

    return updateChapterRelease(data['release-id'], data['old-info'],
                                data['new-info'])

    return getResponse("processReleaseUpdateJson call!.", error=True)
Example #27
def get_publisher_id(data):
	assert "id" in data, "You must specify a id to query for."
	assert is_integer(data['id']), "The 'id' member must be an integer, or a string that can cleanly cast to one."
	a_id = int(data['id'])
	pub, series = item_view_items.get_publisher_id(a_id)
	if not pub:
		return getResponse(error=True, message='No item found for that ID!')
	data = unpack_tag_genre_publisher(pub, series)
	return getDataResponse(data)
Example #28
def get_author_id(data):
	assert "id" in data, "You must specify a id to query for."
	assert is_integer(data['id']), "The 'id' member must be an integer, or a string that can cleanly cast to one."
	a_id = int(data['id'])
	author, series = item_view_items.get_author(a_id)
	if not author:
		return getResponse(error=True, message='No item found for that ID!')
	data = unpack_artist_or_illustrator(author, series)
	return getDataResponse(data)
Example #29
def get_publisher_id(data):
	assert "id" in data, "You must specify a id to query for."
	assert is_integer(data['id']), "The 'id' member must be an integer, or a string that can cleanly cast to one."
	a_id = int(data['id'])
	pub, series = item_view_items.get_publisher_id(a_id)
	if not pub:
		return getResponse(error=True, message='No item found for that ID!')
	data = unpack_tag_genre_publisher(pub, series)
	return getDataResponse(data)
Example #30
def deleteGroup(data):
    return getResponse("Not implemented yet!", error=True)

    # if not current_user.is_mod():
    # 	return getResponse(error=True, message="I see what you (tried) to do there!")
    # assert 'item-id' in data
    # assert 'mode' in data

    # delete_id = data["item-id"]
    # clean_item = Series.query.filter(Series.id==delete_id).one()

    # # !Ordering here matters!
    # # Change-tables have to go second.
    # delete_from = [
    # 		AlternateNames,
    # 		AlternateNamesChanges,
    # 		Author,
    # 		AuthorChanges,
    # 		Illustrators,
    # 		IllustratorsChanges,
    # 		Tags,
    # 		TagsChanges,
    # 		Genres,
    # 		GenresChanges,
    # 		Publishers,
    # 		PublishersChanges,
    # 		Covers,
    # 		CoversChanges,
    # 		Releases,
    # 		ReleasesChanges,
    # 		# Series,
    # 		# SeriesChanges,
    # 	]

    # for clearTable in delete_from:
    # 	clearTable.query.filter(clearTable.series==clean_item.id).delete()

    # Watches.query.filter(Watches.series_id==clean_item.id).delete()
    # Series.query.filter(Series.id==clean_item.id).delete()
    # SeriesChanges.query.filter(SeriesChanges.srccol==clean_item.id).delete()
    # # db.session.delete(clean_item)
    # db.session.commit()

    return getResponse("Series was deleted entirely!", error=False)
Example #31
def toggle_counted(data):
    release = getReleaseFromId(data['id'])
    release.include = not release.include
    db.session.commit()

    flash(
        gettext('Release %(id)s count-state toggled. New state: %(state)s',
                id=release.id,
                state="counted" if release.include else "uncounted"))
    return getResponse("Item count-state toggled!", error=False)
Example #32
def clean_tags(dummy_data, admin_override=False):
	if admin_override is False and (not current_user.is_mod()):
		return getResponse(error=True, message="You have to have moderator privileges to do that!")
	bad_tags = 0

	bad_tags = db.session.execute('''
		SELECT
			COUNT(*)
		FROM
			tags
		WHERE
			tag IN (
			SELECT tag
			FROM (
				SELECT tag
				FROM tags
				GROUP BY tag
				HAVING COUNT(*) = 1
			) AS ONLY_ONCE
			)
		''')

	bad_tags = list(bad_tags)

	db.session.execute('''
		DELETE
		FROM
			tags
		WHERE
			tag IN (
			SELECT tag
			FROM (
				SELECT tag
				FROM tags
				GROUP BY tag
				HAVING COUNT(*) = 1
			) AS ONLY_ONCE
			)
		;
		''')
	db.session.commit()

	return getResponse("Found %s tags that required patching." % (bad_tags), error=False)
Example #33
def delete_bad_tags(dummy_data, admin_override=False):
    if admin_override is False and (not current_user.is_mod()):
        return getResponse(
            error=True,
            message="You have to have moderator privileges to do that!")

    conf = get_config_json()
    print("Bad tags:", conf['delete-tags'])
    removed = 0
    for bad_tag in conf['delete-tags']:
        items = Tags.query.filter(Tags.tag == bad_tag).count()
        print("Found %s instances of tag: %s" % (items, bad_tag))
        removed += 1
        Tags.query.filter(Tags.tag == bad_tag).delete()

    save_config_json(conf)
    return getResponse("Found %s tags that required patching." % (removed),
                       error=False)
Example #34
def setRatingJson(data):
	assert 'mode' in data
	assert 'rating' in data
	assert 'item-id' in data
	assert data['item-id']
	sid    = int(data['item-id'])
	rating = int(data['rating'])

	app.series_tools.set_rating(sid, rating)

	return getResponse("SetRating call!.", error=False)
Example #35
def mergeSeriesItems(data):
	if not current_user.is_mod():
		return getResponse(error=True, message="You have to have moderator privileges to do that!")


	assert 'mode' in data
	assert data['mode'] == 'merge-id'
	assert 'item-id' in data
	assert 'merge_id' in data

	m1, m2 = int(data['item-id']), int(data['merge_id'])
	return merge_series_ids(m1, m2)
Example #36
def preventMergeSeriesItems(data):
	if not current_user.is_mod():
		return getResponse(error=True, message="You have to have moderator privileges to do that!")


	assert 'mode' in data
	assert data['mode'] == 'block-merge-id'
	assert 'item-id' in data
	assert 'separate_id' in data

	m1, m2 = int(data['item-id']), int(data['separate_id'])

	m1, m2 = min(m1, m2), max(m1, m2)
	have = get_config_json()


	if not [m1, m2] in have['no-merge-series']:
		have['no-merge-series'].append([m1, m2])
	save_config_json(have)

	return getResponse("Success", False)
Example #37
def preventMergeSeriesItems(data):
    if not current_user.is_mod():
        return getResponse(
            error=True,
            message="You have to have moderator privileges to do that!")

    assert 'mode' in data
    assert data['mode'] == 'block-merge-id'
    assert 'item-id' in data
    assert 'separate_id' in data

    m1, m2 = int(data['item-id']), int(data['separate_id'])

    m1, m2 = min(m1, m2), max(m1, m2)
    have = get_config_json()

    if not [m1, m2] in have['no-merge-series']:
        have['no-merge-series'].append([m1, m2])
    save_config_json(have)

    return getResponse("Success", False)
Example #38
def mergeGroupEntries(data):
    if not current_user.is_mod():
        return getResponse(
            error=True,
            message="You have to have moderator privileges to do that!")

    assert 'mode' in data
    assert data['mode'] == 'do-group-merge-id'
    assert 'item-id' in data
    assert 'merge_id' in data

    m1, m2 = int(data['item-id']), int(data['merge_id'])
    return merge_tl_group_ids(m1, m2)
Example #39
def setRatingJson(data):
    assert 'mode' in data
    assert 'rating' in data
    assert 'item-id' in data
    assert data['item-id']
    sid = int(data['item-id'])
    rating = int(data['rating'])

    assert rating >= 0
    assert rating <= 10

    app.series_tools.set_rating(sid, rating)

    return getResponse("SetRating call!.", error=False)
Example #40
def setSortOrder(data):
	if not current_user.is_mod():
		return getResponse(error=True, message="You have to have moderator privileges to do that!")

	# Json request: {'item-id': '32606', 'mode': 'set-sort-mode', 'sort-mode': 'chronological_order'}
	assert data['mode'] == 'set-sort-mode'
	assert 'item-id' in data
	assert 'sort-mode' in data
	mid = int(data['item-id'])
	mode = data['sort-mode']


	assert mode in ['chronological_order', 'parsed_title_order']
	itm = Series.query.filter(Series.id==mid).one()

	print(itm.sort_mode)
	print(itm)

	itm.sort_mode = mode

	db.session.commit()

	return getResponse("Success", False)
Example #41
def setSortOrder(data):
    if not current_user.is_mod():
        return getResponse(
            error=True,
            message="You have to have moderator privileges to do that!")

    # Json request: {'item-id': '32606', 'mode': 'set-sort-mode', 'sort-mode': 'chronological_order'}
    assert data['mode'] == 'set-sort-mode'
    assert 'item-id' in data
    assert 'sort-mode' in data
    mid = int(data['item-id'])
    mode = data['sort-mode']

    assert mode in ['chronological_order', 'parsed_title_order']
    itm = Series.query.filter(Series.id == mid).one()

    print(itm.sort_mode)
    print(itm)

    itm.sort_mode = mode

    db.session.commit()

    return getResponse("Success", False)
Example #42
def updateAddCoversJson(data):
    assert 'mode' in data
    assert 'entries' in data
    assert 'item-id' in data
    assert data['mode'] == 'cover-update'
    sid = data['item-id']

    series = Series.query.filter(Series.id == sid).one()

    # print(series)
    ret = None
    for entry in data['entries']:
        ret = processCoverUpdate(series, entry)
        if ret:
            return ret
    return getResponse("Success", False)
Example #43
def updateAddCoversJson(data):
	assert 'mode' in data
	assert 'entries' in data
	assert 'item-id' in data
	assert data['mode'] == 'cover-update'
	sid = data['item-id']

	series = Series.query.filter(Series.id==sid).one()

	# print(series)
	ret = None
	for entry in data['entries']:
		ret = processCoverUpdate(series, entry)
		if ret:
			return ret
	return getResponse("Success", False)
Example #44
def clean_spaces(dummy_data, admin_override=False):
	if admin_override is False and (not current_user.is_mod()):
		return getResponse(error=True, message="You have to have moderator privileges to do that!")


	items = Series.query.all()

	for item in items:
		if item.title != item.title.strip():

			item.title = item.title.strip()

			try:
				db.session.commit()
			except sqlalchemy.exc.IntegrityError:
				db.session.rollback()
				check_merge(item)
Example #45
def clean_spaces(dummy_data, admin_override=False):
    if admin_override is False and (not current_user.is_mod()):
        return getResponse(
            error=True,
            message="You have to have moderator privileges to do that!")

    items = Series.query.all()

    for item in items:
        if item.title != item.title.strip():

            item.title = item.title.strip()

            try:
                db.session.commit()
            except sqlalchemy.exc.IntegrityError:
                db.session.rollback()
                check_merge(item)
Example #46
def updateTitle(series, newTitle):

	newTitle = bleach.clean(newTitle.strip())

	conflict_series = Series.query.filter(Series.title==newTitle).scalar()

	if conflict_series and conflict_series.id != series.id:
		return getResponse("A series with that name already exists! Please choose another name", error=True)


	oldTitle = series.title
	series.title = newTitle

	ret = app.series_tools.updateAltNames(series, [newTitle, oldTitle], deleteother=False)
	if ret:
		return ret

	return None
Example #47
def clean_tags(dummy_data):
	bad_tags = 0

	bad_tags = db.session.execute('''
		SELECT
			COUNT(*)
		FROM
			tags
		WHERE
			tag IN (
			SELECT tag
			FROM (
				SELECT tag
				FROM tags
				GROUP BY tag
				HAVING COUNT(*) = 1
			) AS ONLY_ONCE
			)
		''')

	bad_tags = list(bad_tags)

	db.session.execute('''
		DELETE
		FROM
			tags
		WHERE
			tag IN (
			SELECT tag
			FROM (
				SELECT tag
				FROM tags
				GROUP BY tag
				HAVING COUNT(*) = 1
			) AS ONLY_ONCE
			)
		;
		''')
	db.session.commit()

	return getResponse("Found %s tags that required patching." % (bad_tags), error=False)
Example #48
def alterReleaseItem(data):

	if not current_user.is_mod():
		return getResponse(error=True, message="You have to have moderator privileges to do that!")
	assert 'op' in data
	assert 'mode' in data
	assert 'count' in data
	assert 'id' in data

	assert data['mode'] == "release-ctrl"

	try:
		data['id'] = int(data['id'])
	except ValueError:
		raise AssertionError("Failure converting item ID to integer!")
	assert data['count'] in BOOL_LUT
	data['count'] = BOOL_LUT[data['count']]

	assert data['op'] in RELEASE_OPS

	return RELEASE_OPS[data['op']](data)
Example #49
def alterReleaseItem(data):

    if not current_user.is_mod():
        return getResponse(
            error=True,
            message="You have to have moderator privileges to do that!")
    assert 'op' in data
    assert 'mode' in data
    assert 'count' in data
    assert 'id' in data

    assert data['mode'] == "release-ctrl"

    try:
        data['id'] = int(data['id'])
    except ValueError:
        raise AssertionError("Failure converting item ID to integer!")
    assert data['count'] in BOOL_LUT
    data['count'] = BOOL_LUT[data['count']]

    assert data['op'] in RELEASE_OPS

    return RELEASE_OPS[data['op']](data)
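
BOOL_LUT and RELEASE_OPS are referenced here but never defined in these examples. A plausible sketch of their shape, with placeholder operation handlers (names are invented, not taken from the original project):

# Maps the string forms the client may send to real booleans.
BOOL_LUT = {
    "True": True, "False": False,
    "true": True, "false": False,
    True: True, False: False,
}

def _release_set_counted(data):          # placeholder handler
    return {"error": False, "message": "count flag set to %s" % data["count"]}

def _release_delete(data):               # placeholder handler
    return {"error": False, "message": "release %s deleted" % data["id"]}

# Maps the validated 'op' field to the function that performs it.
RELEASE_OPS = {
    "set-counted": _release_set_counted,
    "delete":      _release_delete,
}
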
Example #50
def addNewCover(series, updateDat):
	assert 'name' in updateDat
	assert 'file' in updateDat
	assert 'type' in updateDat

	data = DataURI(updateDat['file'])

	dathash = getHash(data.data).lower()
	have = Covers.query.filter(Covers.hash == dathash).scalar()
	if have:
		return getResponse("A cover with that MD5 hash already exists! Are you accidentally adding a duplicate?", True)

	covpath = saveCoverFile(data.data, updateDat['name'])

	new = Covers(
		srcfname    = updateDat['name'],
		series      = series.id,
		description = '',
		fspath      = covpath,
		hash        = dathash,
		)

	db.session.add(new)
	db.session.commit()
Example #51
def merge_tl_group_ids(m1, m2):
	merge_from = max(m1, m2)
	merge_to   = min(m1, m2)

	itm_from = Translators.query.filter(Translators.id==merge_from).one()
	itm_to   = Translators.query.filter(Translators.id==merge_to).one()

	print(itm_from)
	print(itm_to)


	Releases       .query.filter(Releases.tlgroup       ==merge_from).update({'tlgroup': merge_to})
	ReleasesChanges.query.filter(ReleasesChanges.tlgroup==merge_from).update({'tlgroup': merge_to})

	app.api_handlers.updateGroupAltNames(itm_to, [itm.name for itm in itm_from.alt_names], delete=False)

	AlternateTranslatorNamesChanges.query                   \
		.filter(AlternateTranslatorNamesChanges.group==itm_from.id) \
		.delete(synchronize_session="fetch")

	db.session.delete(itm_from)
	db.session.commit()

	return getResponse("Success", False)
Example #52
def updateTitle(series, newTitle_raw):

	newTitle = bleach.clean(newTitle_raw.strip(), tags=[], strip=True)

	# Short circuit if nothing has changed.
	if newTitle == series.title:
		return

	conflict_series = Series.query.filter(Series.title==newTitle).scalar()

	if conflict_series and conflict_series.id != series.id:
		return getResponse("A series with that name already exists! Please choose another name", error=True)


	oldTitle          = series.title
	series.title      = newTitle
	series.changeuser = getCurrentUserId()
	series.changetime = datetime.datetime.now()

	ret = updateAltNames(series, [newTitle, oldTitle], deleteother=False)
	if ret:
		return ret

	return None
Example #53
def setReadingProgressJson(data):
    sid, progress = validateReadingProgressData(data)

    watch_row = Watches.query.filter((Watches.user_id == getCurrentUserId())
                                     & (Watches.series_id == sid)).one()

    vol, chp, frag = progress

    if chp == 0 and vol == 0:
        vol = -1
        chp = -1

    if vol == 0:
        vol = None

    if frag == 0:
        frag = None

    watch_row.volume = vol
    watch_row.chapter = chp
    watch_row.fragment = frag
    db.session.commit()

    return getResponse('Succeeded')
Example #54
def merge_series_ids(m1, m2):
	merge_from = max(m1, m2)
	merge_to   = min(m1, m2)

	itm_from = Series.query.filter(Series.id==merge_from).scalar()
	itm_to = Series.query.filter(Series.id==merge_to).scalar()
	if not itm_from:
		return getResponse("Item from (id: {num}) not found?".format(num=merge_from), error=True)
	if not itm_to:
		return getResponse("Item to (id: {num}) not found?".format(num=merge_to), error=True)


	alts   = []
	author = []
	illust = []
	tags   = []
	genres = []
	publs  = []

	alts.append(itm_from.title)
	for altname in AlternateNames.query.filter(AlternateNames.series==itm_from.id).all():
		alts.append(altname.name)

	for val in Author.query.filter(Author.series==itm_from.id).all():
		author.append(val.name)

	for val in Illustrators.query.filter(Illustrators.series==itm_from.id).all():
		illust.append(val.name)

	for val in Tags.query.filter(Tags.series==itm_from.id).all():
		tags.append(val.tag)

	for val in Genres.query.filter(Genres.series==itm_from.id).all():
		genres.append(val.genre)

	for val in Publishers.query.filter(Publishers.series==itm_from.id).all():
		publs.append(val.name)

	Ratings.query.filter(Ratings.series_id==merge_from).update({'series_id': merge_to})

	# !Ordering here matters!
	# Change-tables have to go second.
	delete_from = [
			AlternateNames,
			AlternateNamesChanges,
			Author,
			AuthorChanges,
			Illustrators,
			IllustratorsChanges,
			Tags,
			TagsChanges,
			Genres,
			GenresChanges,
			Publishers,
			PublishersChanges,
		]

	for clearTable in delete_from:
		clearTable.query.filter(clearTable.series==itm_from.id).delete()

	app.series_tools.updateAltNames   ( itm_to, alts,                deleteother=False )
	app.series_tools.setAuthorIllust  ( itm_to, author     = author, deleteother=False )
	app.series_tools.setAuthorIllust  ( itm_to, illust     = illust, deleteother=False )
	app.series_tools.updateTags       ( itm_to, tags       = tags,   deleteother=False )
	app.series_tools.updateGenres     ( itm_to, genres     = genres, deleteother=False )
	app.series_tools.updatePublishers ( itm_to, publishers = publs,  deleteother=False )

	# For each user watch, if the user is already watching the merge-to item,
	# just delete it. If not, update the user-id
	for watch in Watches.query.filter(Watches.series_id==itm_from.id).all():
		if not Watches                              \
				.query                                  \
				.filter(Watches.series_id==itm_to.id)   \
				.filter(Watches.user_id==watch.user_id) \
				.scalar():

			watch.series_id = itm_to.id

		else:
			db.session.delete(watch)

	if itm_from.description and not itm_to.description:
		itm_to.description = itm_from.description

	if itm_from.description and not itm_to.description:
		itm_to.description = itm_from.description
	if itm_from.type and not itm_to.type:
		itm_to.type = itm_from.type
	if itm_from.origin_loc and not itm_to.origin_loc:
		itm_to.origin_loc = itm_from.origin_loc
	if itm_from.demographic and not itm_to.demographic:
		itm_to.demographic = itm_from.demographic
	if itm_from.orig_lang and not itm_to.orig_lang:
		itm_to.orig_lang = itm_from.orig_lang
	if not itm_to.volume or (itm_from.volume and itm_from.volume > itm_to.volume):
		itm_to.volume = itm_from.volume
	if not itm_to.chapter or (itm_from.chapter and itm_from.chapter > itm_to.chapter):
		itm_to.chapter = itm_from.chapter
	if itm_from.region and not itm_to.region:
		itm_to.region = itm_from.region
	if not itm_to.tot_chapter or (itm_from.tot_chapter and itm_from.tot_chapter > itm_to.tot_chapter):
		itm_to.tot_chapter = itm_from.tot_chapter
	if not itm_to.tot_volume or (itm_from.tot_volume and itm_from.tot_volume > itm_to.tot_volume):
		itm_to.tot_volume = itm_from.tot_volume
	if itm_from.license_en and not itm_to.license_en:
		itm_to.license_en = itm_from.license_en
	if itm_from.orig_status and not itm_to.orig_status:
		itm_to.orig_status = itm_from.orig_status

	if itm_from.website and not itm_to.website:
		itm_to.website = itm_from.website
	if itm_from.pub_date and not itm_to.pub_date:
		itm_to.pub_date = itm_from.pub_date

	db.session.flush()
	sid = itm_from.id
	tid = itm_to.id
	Covers.query.filter(Covers.series==sid).update({'series': tid})
	CoversChanges.query.filter(CoversChanges.series==sid).update({'series': tid})

	# Move releases over
	Releases.query.filter(Releases.series==sid).update({'series': tid})
	ReleasesChanges.query.filter(ReleasesChanges.series==sid).update({'series': tid})

	Series.query.filter(Series.id==sid).delete(synchronize_session="fetch")
	SeriesChanges.query.filter(SeriesChanges.id==sid).delete(synchronize_session="fetch")

	db.session.commit()

	return getResponse("Success", False)
Example #55
def consolidate_rrl_items(data, admin_override=False):
    if admin_override is False and (not current_user.is_mod()):
        return getResponse(
            error=True,
            message="You have to have moderator privileges to do that!")

    dups = db.engine.execute('''
		SELECT
			id, website
		FROM
			series
		WHERE
			website IS NOT NULL AND website != ''
		;''')

    match_num = 0
    paths = {}
    for idno, website in dups:
        if not "royalroadl.com" in website.lower():
            continue

        parsed = urllib.parse.urlsplit(website)
        if not parsed.path in paths:
            paths[parsed.path] = []
        paths[parsed.path].append((idno, website, parsed))

        # matches = Series.query.filter(Series.website==website).all()
        # ids = [match.id for match in matches]
        # zipped = list(zip(ids, ids[1:]))
        # for m1, m2 in zipped:
        # 	match_num += 1
        # 	merge_series_ids(m1, m2)

    long = []
    for key in paths.keys():
        if len(paths[key]) > 1:
            long.append(paths[key])

    for item in long:
        # if not all([(tmp[2].netloc == 'royalroadl.com' or tmp[2].netloc == 'www.royalroadl.com') for tmp in item]):
        assert all((
            all([item[0][2].scheme == tmp[2].scheme for tmp in item]),
            all([item[0][2].path == tmp[2].path for tmp in item]),
            all([item[0][2].query == tmp[2].query for tmp in item]),
            all([(tmp[2].netloc == 'royalroadl.com'
                  or tmp[2].netloc == 'www.royalroadl.com') for tmp in item]),
        ))
        # if not all((
        # 		all([item[0][2].scheme == tmp[2].scheme for tmp in item]),
        # 		all([item[0][2].path   == tmp[2].path for tmp in item]),
        # 		all([item[0][2].query  == tmp[2].query for tmp in item]),
        # 		all([(tmp[2].netloc == 'royalroadl.com' or tmp[2].netloc == 'www.royalroadl.com') for tmp in item]),
        # 	)):
        # 	print("Wut: ", item)
        # 	print("	", all([tmp[2].scheme for tmp in item]))
        # 	print("	", all([tmp[2].path for tmp in item]))
        # 	print("	", all([tmp[2].query for tmp in item]))
        # 	print("	", all([(tmp[2].netloc == 'royalroadl.com' or tmp[2].netloc == 'www.royalroadl.com') for tmp in item]))
        # for sub in item:
        # 	print("	", sub[2])
        print("Need to merge: ", [tmp[0] for tmp in item])

        match_num += 1
        print("merging:", item[0][0], item[1][0])
        merge_series_ids(item[0][0], item[1][0])
        db.session.commit()

    print("Merged: ", match_num)

    return getResponse("%s Items merged." % match_num, error=False)
Example #56
def delete(data):
    release = getReleaseFromId(data['id'])
    db.session.delete(release)
    db.session.commit()
    flash(gettext('Release deleted.'))
    return getResponse("Release deleted.", error=False)
Example #57
def merge_series_ids(m1, m2):
    merge_from = max(m1, m2)
    merge_to = min(m1, m2)

    itm_from = Series.query.filter(Series.id == merge_from).scalar()
    itm_to = Series.query.filter(Series.id == merge_to).scalar()
    if not itm_from:
        return getResponse(
            "Item from (id: {num}) not found?".format(num=merge_from),
            error=True)
    if not itm_to:
        return getResponse(
            "Item to (id: {num}) not found?".format(num=merge_to), error=True)

    alts = []
    author = []
    illust = []
    tags = []
    genres = []
    publs = []

    alts.append(itm_from.title)
    for altname in AlternateNames.query.filter(
            AlternateNames.series == itm_from.id).all():
        alts.append(altname.name)

    for val in Author.query.filter(Author.series == itm_from.id).all():
        author.append(val.name)

    for val in Illustrators.query.filter(
            Illustrators.series == itm_from.id).all():
        illust.append(val.name)

    for val in Tags.query.filter(Tags.series == itm_from.id).all():
        tags.append(val.tag)

    for val in Genres.query.filter(Genres.series == itm_from.id).all():
        genres.append(val.genre)

    for val in Publishers.query.filter(Publishers.series == itm_from.id).all():
        publs.append(val.name)

    Ratings.query.filter(Ratings.series_id == merge_from).update(
        {'series_id': merge_to})

    # !Ordering here matters!
    # Change-tables have to go second.
    delete_from = [
        AlternateNames,
        AlternateNamesChanges,
        Author,
        AuthorChanges,
        Illustrators,
        IllustratorsChanges,
        Tags,
        TagsChanges,
        Genres,
        GenresChanges,
        Publishers,
        PublishersChanges,
    ]

    for clearTable in delete_from:
        clearTable.query.filter(clearTable.series == itm_from.id).delete()

    app.series_tools.updateAltNames(itm_to, alts, deleteother=False)
    app.series_tools.setAuthorIllust(itm_to, author=author, deleteother=False)
    app.series_tools.setAuthorIllust(itm_to, illust=illust, deleteother=False)
    app.series_tools.updateTags(itm_to, tags=tags, deleteother=False)
    app.series_tools.updateGenres(itm_to, genres=genres, deleteother=False)
    app.series_tools.updatePublishers(itm_to,
                                      publishers=publs,
                                      deleteother=False)

    # For each user watch, if the user is already watching the merge-to item,
    # just delete it. If not, update the user-id
    for watch in Watches.query.filter(Watches.series_id == itm_from.id).all():
        if not Watches                              \
          .query                                  \
          .filter(Watches.series_id==itm_to.id)   \
          .filter(Watches.user_id==watch.user_id) \
          .scalar():

            watch.series_id = itm_to.id

        else:
            db.session.delete(watch)

    if itm_from.description and not itm_to.description:
        itm_to.description = itm_from.description

    if itm_from.description and not itm_to.description:
        itm_to.description = itm_from.description
    if itm_from.type and not itm_to.type:
        itm_to.type = itm_from.type
    if itm_from.origin_loc and not itm_to.origin_loc:
        itm_to.origin_loc = itm_from.origin_loc
    if itm_from.demographic and not itm_to.demographic:
        itm_to.demographic = itm_from.demographic
    if itm_from.orig_lang and not itm_to.orig_lang:
        itm_to.orig_lang = itm_from.orig_lang
    if itm_from.region and not itm_to.region:
        itm_to.region = itm_from.region
    if itm_from.license_en and not itm_to.license_en:
        itm_to.license_en = itm_from.license_en
    if itm_from.orig_status and not itm_to.orig_status:
        itm_to.orig_status = itm_from.orig_status

    if itm_from.website and not itm_to.website:
        itm_to.website = itm_from.website
    if itm_from.pub_date and not itm_to.pub_date:
        itm_to.pub_date = itm_from.pub_date

    db.session.flush()
    sid = itm_from.id
    tid = itm_to.id
    Covers.query.filter(Covers.series == sid).update({'series': tid})
    CoversChanges.query.filter(CoversChanges.series == sid).update(
        {'series': tid})

    # Move releases over
    Releases.query.filter(Releases.series == sid).update({'series': tid})
    ReleasesChanges.query.filter(ReleasesChanges.series == sid).update(
        {'series': tid})

    Series.query.filter(Series.id == sid).delete(synchronize_session="fetch")
    SeriesChanges.query.filter(SeriesChanges.id == sid).delete(
        synchronize_session="fetch")

    db.session.commit()

    app.series_tools.set_rating(sid, new_rating=None)

    return getResponse("Success", False)
Example #58
def fix_escaped_quotes(dummy_data, admin_override=False):
    if admin_override is False and (not current_user.is_mod()):
        return getResponse(
            error=True,
            message="You have to have moderator privileges to do that!")

    # SELECT * FROM series WHERE title LIKE E'%\\\'%';
    bad_title = 0

    q = Series.query.filter(
        or_(Series.title.like(r"%'%"), Series.title.like(r"%’%"),
            Series.title.like(r"%‘%"), Series.title.like(r"%“%"),
            Series.title.like(r"%”%")))
    items = q.all()
    print("Name fixing processing query resulted in %s items" % len(items))
    for item in items:
        old = item.title
        new = old
        while any([
                r"\"" in new, r"\'" in new, "’" in new, "‘" in new, "“" in new,
                "”" in new
        ]):
            new = new.replace(r"\'", "'")
            new = new.replace(r'\"', '"')
            new = new.replace(r"’", "'")
            new = new.replace(r"‘", "'")
            new = new.replace(r"“", '"')
            new = new.replace(r"”", '"')

        have = Series.query.filter(Series.title == new).scalar()
        if old != new:
            if have:
                print("Duplicate item!", (old, new), old == new)
                merge_series_ids(have.id, item.id)
            else:
                print("Fixing title.")
                item.title = new
                db.session.commit()
            bad_title += 1

    bad_alt_title = 0

    q = AlternateNames.query.filter(
        or_(AlternateNames.name.like(r"%'%"), AlternateNames.name.like(r"%’%"),
            AlternateNames.name.like(r"%‘%"), AlternateNames.name.like(r"%“%"),
            AlternateNames.name.like(r"%”%")))

    conflicts = ''
    items = q.all()
    print("Alternate names processing query resulted in %s items" % len(items))
    for item in items:
        old = item.name
        new = old
        while any([
                r"\"" in new, r"\'" in new, "’" in new, "‘" in new, "“" in new,
                "”" in new
        ]):
            new = new.replace(r"\'", "'")
            new = new.replace(r'\"', '"')
            new = new.replace(r"’", "'")
            new = new.replace(r"‘", "'")
            new = new.replace(r"“", '"')
            new = new.replace(r"”", '"')
        if old != new:
            haves = AlternateNames.query.filter(
                AlternateNames.name == new).all()
            # assert(len(have) <= 1), "too many results - '%s'" % [(t.id, t.series, t.name) for t in have]

            for have in haves:
                if have.series == item.series:
                    print("Duplicate names")
                    assert have.series == item.series
                    # We don't care about duplicates if one is the escaped version of the other
                    db.session.delete(item)
                    db.session.commit()
                else:
                    conflicts += """\nSeries mismatch?\nSeries %s: '%s'\nSeries %s: '%s'""" % (
                        have.series, have.name, item.series, item.name)
                    print("Wat?", have.name, item.name)
                    print("Wat?", have.series, item.series)
            else:
                print("Fixing title.")
                item.name = new
                db.session.commit()
            bad_alt_title += 1

    bad_desc = 0

    # F**K ALL SMART QUOTE BULLSHITS EVER
    q = Series.query.filter(
        or_(Series.description.like(r"%'%"), Series.description.like(r"%’%"),
            Series.description.like(r"%‘%"), Series.description.like(r"%“%"),
            Series.description.like(r"%”%")))

    items = q.all()
    print("Series description processing query resulted in %s items" %
          len(items))
    for item in items:
        old = item.description
        new = old

        while any([
                r"\"" in new, r"\'" in new, "’" in new, "‘" in new, "“" in new,
                "”" in new
        ]):
            new = new.replace(r"\'", "'")
            new = new.replace(r'\"', '"')
            new = new.replace(r"’", "'")
            new = new.replace(r"‘", "'")
            new = new.replace(r"“", '"')
            new = new.replace(r"”", '"')
        if old != new:
            print(
                "Fixing description smart-quotes and over-escapes for series: %s"
                % item.id)
            item.description = new
            db.session.commit()
            bad_desc += 1

    print("Update complete.")

    return getResponse(
        "%s main titles, %s alt titles, %s descriptions required fixing.%s" %
        (bad_title, bad_alt_title, bad_desc, conflicts),
        error=False)
Example #59
def get_watches(data):
    return getResponse(error=True, message="Not yet implemented")
Example #60
def addStory(updateDat):
    assert 'story' in updateDat
    story = updateDat['story']

    assert 'name' in story
    assert 'auth' in story
    assert 'fname' in story
    assert 'file' in story
    assert 'desc' in story
    assert 'tags' in story

    data = DataURI(story['file'])

    dathash = getHash(data.data).lower()
    have = Story.query.filter(Story.hash == dathash).scalar()

    if have:
        # print("Have file already!")
        return getResponse(
            "A file with that MD5 hash already exists! Are you accidentally adding a duplicate?",
            True)

    have = Story.query.filter(Story.title == story['name']).scalar()
    if have:
        orig_name = story['name']
        loop = 2
        while have:
            print("Have story with that name ('%s')!" % story['name'])
            story['name'] = orig_name + " (%s)" % loop
            have = Story.query.filter(Story.title == story['name']).scalar()
            loop += 1
        print("Story added with number in name: '%s'" % story['name'])

    if len(story['name']) > 80:
        return getResponse("Maximum story title length is 80 characters!",
                           True)
    if len(story['name']) < 3:
        return getResponse("Minimum story title length is 3 characters!", True)
    if len(story['auth']) < 5:
        return getResponse("Minimum story author name length is 5 characters!",
                           True)
    if len(story['auth']) > 60:
        return getResponse(
            "Maximum story author name length is 60 characters!", True)
    if len(story['desc']) < 30:
        return getResponse(
            "Minimum story description length is 30 characters!", True)
    if len(story['desc']) > 500:
        return getResponse(
            "Maximum story description length is 500 characters!", True)

    fspath = saveFile(data.data, story['fname'])

    stags = ["-".join(itm_tags.split(" ")) for itm_tags in story['tags']]
    stags = [bleach.clean(tag, tags=[], strip=True) for tag in stags]

    # print("Author: ", story['auth'])
    # print("stags: ", story['tags'])
    # print("stags: ", stags)

    post_date = datetime.datetime.now()
    if 'ul_date' in story and isinstance(story['ul_date'], datetime.datetime):
        post_date = story['ul_date']

    new = Story(
        title=bleach.clean(story['name'], tags=[], strip=True),
        srcfname=story['fname'],
        description=markdown.markdown(bleach.clean(story['desc'], strip=True)),
        fspath=fspath,
        hash=dathash,
        # author      = [story['auth']],
        # tags        = stags,
        pub_date=post_date)

    [new.tags.append(Tags(tag=tag)) for tag in stags]
    new.author.append(
        Author(name=bleach.clean(story['auth'], tags=[], strip=True)))

    db.session.add(new)
    db.session.commit()

    flash('Your story has been added! Thanks for posting your content!')
    return getResponse("Story added", error=False)