def copy_data(self):
    """
    Copy the data from its point of origin, serializing it, storing it
    serialized as well as in its raw form, and calculate a running hash
    of the serialized representation.

    Returns a 4-tuple: (serialized temp file, generator decoding the
    serialized lines, raw-content temp file, hex digest of the
    serialized stream).
    """
    hash_function = hashlib.sha256()
    self.temporary_file = tempfile.NamedTemporaryFile(mode='w+')
    self.copy_file = tempfile.NamedTemporaryFile(mode='w+b')
    for row in self.get_data_iteraror():
        data = dbsafe_encode(row)
        # Fix: reuse the already-encoded value instead of calling
        # dbsafe_encode(row) a second time for the same row.
        self.temporary_file.write(data)
        self.temporary_file.write('\n')
        hash_function.update(data)
        # Only plain string rows are mirrored verbatim into the raw copy.
        # NOTE(review): types.StringTypes is Python 2 only — confirm this
        # module still targets Python 2.
        if isinstance(row, types.StringTypes):
            self.copy_file.write(row)
    self.temporary_file.seek(0)
    self.copy_file.seek(0)
    self.new_hash = hash_function.hexdigest()
    # line[:-1] strips the trailing newline written above before decoding.
    self.data_iterator = (dbsafe_decode(line[:-1]) for line in self.temporary_file)
    # Return the serialized content, an iterator to decode the serialized
    # content, a handle to the raw content and the hash.
    return self.temporary_file, self.data_iterator, self.copy_file, self.new_hash
def copy_data(self):
    """
    Copy the data from its point of origin, serializing it, storing it
    serialized as well as in its raw form, and calculate a running hash
    of the serialized representation.

    Binary sources (get_binary_iterator available) are copied verbatim;
    otherwise each part is dbsafe-encoded, one part per line.
    """
    hasher = hashlib.sha256()
    try:
        source = self.get_binary_iterator()
    except AttributeError:
        # No binary iterator on this instance: fall back to the
        # non-binary source and serialize each part with dbsafe_encode,
        # writing one encoded part per line of a text temp file.
        source = self.get_non_binary_iterator()
        self.copy_file = tempfile.NamedTemporaryFile(mode='w+')
        for item in source:
            encoded = dbsafe_encode(item)
            self.copy_file.write(encoded)
            self.copy_file.write('\n')
            hasher.update(encoded)
        self.copy_file.seek(0)
        self.data_iterator = (dbsafe_decode(line) for line in self.copy_file)
    else:
        # Binary source: stream the raw parts into a binary temp file
        # and expose the rewound file itself as the iterator.
        self.copy_file = tempfile.NamedTemporaryFile(mode='w+b')
        for item in source:
            self.copy_file.write(item)
            hasher.update(item)
        self.copy_file.seek(0)
        self.data_iterator = self.copy_file
    self.new_hash = hasher.hexdigest()
def log_sql(self, sql, time):
    """
    Probabilistically append a pickled ``(time, sql, stack)`` record to
    the SQL log file.

    Only a random ``SQL_LOG_FREQ`` fraction of calls is actually logged,
    keeping overhead and log volume down.
    """
    import traceback
    if random.random() < SQL_LOG_FREQ:
        # extract_stack() already returns a list — no copy needed.
        stack = traceback.extract_stack()
        # Context manager guarantees the file is closed even if
        # dbsafe_encode or the write raises (original leaked the handle
        # on exception).
        with open(SQL_LOG_PATH, 'a') as log_file:
            log_file.write("%s\n" % (dbsafe_encode((time, sql, stack), compress_object=True)))
def test_division_lifecycle(self):
    """Create a Division with a pickled eligibility query and read it back."""
    encoded_query = dbsafe_encode(Person.objects.u17().query)
    Division.objects.create(
        name="test",
        description="test",
        eligible_person_query=encoded_query,
    )
    division = Division.objects.get(name="test")
    eligible = Person.objects.all()
    # Swap in the stored query so the queryset re-evaluates against it.
    eligible.query = division.eligible_person_query
    assert eligible.count() == 1
def test_decode_error(self):
    """A value that fails to decode is returned as its raw encoded string."""
    def mock_decode_error(*args, **kwargs):
        raise Exception()

    # objects.create() already persists the instance; the original's
    # extra model.save() was a redundant second write.
    model = MinimalTestingModel.objects.create(pickle_field={'foo': 'bar'})
    self.assertEqual(
        {'foo': 'bar'},
        MinimalTestingModel.objects.get(pk=model.pk).pickle_field)

    with patch('picklefield.fields.dbsafe_decode', mock_decode_error):
        encoded_value = dbsafe_encode({'foo': 'bar'})
        # With decoding broken, the field falls back to returning the
        # raw encoded value unchanged.
        self.assertEqual(
            encoded_value,
            MinimalTestingModel.objects.get(pk=model.pk).pickle_field)
def testSerialization(self):
    """A pickled field JSON-serializes to its dbsafe-encoded string form."""
    model = MinimalTestingModel(pickle_field={'foo': 'bar'})
    serialized = serializers.serialize('json', [model])
    data = json.loads(serialized)

    # determine output at runtime, because pickle output in python 3
    # is different (but compatible with python 2)
    p = dbsafe_encode({'foo': 'bar'})

    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(data, [{'pk': None,
                             'model': 'picklefield.minimaltestingmodel',
                             'fields': {"pickle_field": p}}])

    for deserialized_test in serializers.deserialize('json', serialized):
        self.assertEqual(deserialized_test.object, model)
def test_serialization(self):
    """Round-trip a model with a pickled field through the JSON serializer."""
    model = MinimalTestingModel(pk=1, pickle_field={'foo': 'bar'})
    payload = serializers.serialize('json', [model])

    # Pickle output differs between Python 2 and 3 (while staying
    # compatible), so compute the expected encoding at runtime.
    expected_field = dbsafe_encode({'foo': 'bar'})
    expected = [{
        'pk': 1,
        'model': 'tests.minimaltestingmodel',
        'fields': {"pickle_field": expected_field},
    }]
    self.assertEqual(json.loads(payload), expected)

    for restored in serializers.deserialize('json', payload):
        self.assertEqual(restored.object, model)
def get_player_data(gsheet: gspread.Worksheet):
    """
    Import player registrations from a Google sheet into the Django models.

    Creates/updates Division, User, Person, Club, memberships, Tournament,
    TournamentDivision, Team, Roster and roster relationships from the
    sheet rows. Rows that violate DB constraints are logged and skipped.

    NOTE(review): the original docstring claimed this returns a pandas
    DataFrame — it does not; it mutates the database and returns None.
    Assumes row 3 (index 2) is the header row and cells are non-empty —
    TODO confirm against the actual sheet layout.
    """
    # Baseline divisions; the Women division is restricted by a pickled
    # Person query (sex == "f").
    Division.objects.get_or_create(name="Mixed")
    Division.objects.get_or_create(name="Open")
    query = dbsafe_encode(Person.objects.filter(sex="f").query)
    Division.objects.get_or_create(name="Women", eligible_person_query=query)
    # Used as approver/reporter on every membership created below.
    admin_user = User.objects.get(username="******")
    data = gsheet.get_all_values()
    # Header row is the THIRD sheet row (index 2); data starts at index 3.
    h = data[2]
    for row in data[3:]:
        user_data = {
            "username": row[1],
            "email": row[h.index("e-mail")],
        }
        try:
            user = User.objects.update_or_create(**user_data)[0]
        except django.db.utils.IntegrityError:
            # Player registered twice — skip the duplicate row.
            logger.info("spieler doppelt gemeldet.")
            continue
        person_data = {
            "firstname": row[h.index("firstname")],
            "lastname": row[h.index("lastname")],
            "sex": row[h.index("sex")].lower(),
            # Sheet only stores the birth year; pin to Jan 1st.
            "birthdate": f"{row[h.index('birthdate (year)')]}-01-01",
            "user": user,
        }
        try:
            person = Person.objects.update_or_create(**person_data)[0]
        except django.core.exceptions.ValidationError:
            # Missing birth year: fall back to 2019 and retry once.
            logger.info("keine geburtsdatum angegeben wird auf 2019 gesetzt.")
            person_data["birthdate"] = "2019-01-01"
            person = Person.objects.update_or_create(**person_data)[0]
        except django.db.utils.IntegrityError:
            logger.info("person existiert.")
            continue
        club_data = {
            "name": row[h.index("club_membership_name")],
            "founded_on": "2000-01-01"
        }
        try:
            club = Club.objects.update_or_create(**club_data)[0]
        except django.db.utils.IntegrityError:
            logger.info("club existiert.")
            continue
        role = row[h.index("club_membership_role")]
        if role == "":
            role = "Member"
        person_to_club_membership_data = {
            "valid_from": f"{row[h.index('Entry OEFSV (year)')]}-01-01",
            "role": role,
            "person": person,
            "club": club,
            "approved_by": admin_user,
            "reporter": admin_user,
        }
        try:
            personToClubMembership = PersonToClubMembership.objects.update_or_create(
                **person_to_club_membership_data)[0]
        except django.core.exceptions.ValidationError:
            # Missing entry year: default to 2019 and retry once.
            logger.info(
                "keine Eintritsjahr angegeben eben wird auf 2019 gesetzt.")
            person_to_club_membership_data["valid_from"] = "2019-01-01"
            personToClubMembership = PersonToClubMembership.objects.update_or_create(
                **person_to_club_membership_data)[0]
        except django.db.utils.IntegrityError:
            logger.info("mitgliedschaft besteht bereits.")
            continue
        # One tournament per configured team block; divisions are attached
        # via TournamentDivision join rows.
        for team in TEAMS:
            # NOTE(review): start is AFTER end here (02-02 vs 02-01) —
            # looks like a typo in the fixture dates; confirm.
            tournament = Tournament.objects.get_or_create(
                name=team["turnier"], start="2019-02-02 19:00",
                end="2019-02-01 19:00")[0]
            tournament_divisions = []
            for division in team["divisions"]:
                div = Division.objects.filter(name=division)[0]
                td = TournamentDivision.objects.get_or_create(
                    tournament=tournament, division=div)[0]
                tournament_divisions.append(td)
            # A non-empty team-name cell means this player plays for that team.
            if row[h.index(team["name"])] != "":
                team_data = {
                    "founded_on": "2000-01-01",
                    "name": row[h.index(team["name"])],
                    "club_membership": club
                }
                try:
                    teams_ = Team.objects.filter(name=team_data["name"])
                    if teams_:
                        team_ = teams_[0]
                        logger.info(f"team '{team_.name}'' existiert bereits.")
                    else:
                        team_ = Team.objects.update_or_create(**team_data)[0]
                except django.db.utils.IntegrityError:
                    logger.info(f"team '{team['name']}' existiert.")
                    continue
                try:
                    # Women go into the second division of the tournament
                    # when one exists; everyone else into the first.
                    if person.sex == "f" and len(tournament_divisions) > 1:
                        tv = tournament_divisions[1]
                    else:
                        tv = tournament_divisions[0]
                    rosters_ = Roster.objects.filter(team=team_, tournament_division=tv)
                    if rosters_:
                        roster = rosters_[0]
                        logger.info(f"roster '{roster}' existiert bereits.")
                    else:
                        roster = Roster.objects.update_or_create(
                            team=team_, tournament_division=tv)[0]
                except django.db.utils.IntegrityError:
                    # NOTE(review): `roster` may be unbound here if the
                    # filter() raised; `roster[0]` also indexes a model
                    # instance — this log line would itself fail. Confirm.
                    logger.info(f"Roster '{roster[0]}' existiert.")
                    continue
                # NOTE(review): this builds a 1-tuple, so the == ""
                # fallback below can never trigger and a tuple is stored
                # as the role — almost certainly should be the bare cell
                # value. Flagged, not changed.
                role = (row[h.index(team["role"])], )
                if role == "":
                    role = "Player"
                person_to_roster_membership_data = {
                    "role": role,
                    "number": row[h.index(team["number"])],
                    "person":
                        person,
                    "roster": roster,
                    "approved_by": admin_user,
                    "reporter": admin_user,
                }
                try:
                    personToRosterRelationship = PersonToRosterRelationship.objects.update_or_create(
                        **person_to_roster_membership_data)[0]
                except ValueError:
                    # No jersey number given: default to 0 and retry once.
                    logger.info("keine Nummer vergeben wird auf 0 gesetzt.")
                    person_to_roster_membership_data["number"] = 0
                    try:
                        PersonToRosterRelationship.objects.update_or_create(
                            **person_to_roster_membership_data)[0]
                    except django.db.utils.IntegrityError:
                        # NOTE(review): missing f-prefix — {person}/{roster}
                        # are logged literally, not interpolated.
                        logger.info(
                            "Spieler {person} bereits auf roster {roster}.")
                        continue
                except django.db.utils.IntegrityError:
                    # NOTE(review): missing f-prefix here as well.
                    logger.info(
                        "Spieler {person} bereits auf roster {roster}.")
                    continue
from polymodels.utils import copy_fields

from ...db.fields import FieldDefinitionTypeField, LazilyTranslatedField, PythonIdentifierField
from ...hacks import patch_model_option_verbose_name_raw
from ...utils import lazy_string_format, model_name, popattr
from ..ordered import OrderedModel
from ..model import ModelDefinitionAttribute
from .managers import FieldDefinitionChoiceManager, FieldDefinitionManager

# Applied at import time so Options.verbose_name_raw behaves as this
# package expects before any model classes are built.
patch_model_option_verbose_name_raw()

# Pickled sentinel for "no default provided", stored in the DB in place
# of django's models.NOT_PROVIDED class.
NOT_PROVIDED = dbsafe_encode(models.NOT_PROVIDED)


class FieldDefinitionBase(models.base.ModelBase):
    """
    Metaclass for field-definition models.

    Holds the attribute names used to declare a field definition's class,
    options, description and category, plus registries shared by all
    definitions. (Body may continue beyond this chunk.)
    """

    # Attribute names read off field-definition classes.
    FIELD_CLASS_ATTR = "defined_field_class"
    FIELD_OPTIONS_ATTR = "defined_field_options"
    FIELD_DESCRIPTION_ATTR = "defined_field_description"
    FIELD_CATEGORY_ATTR = "defined_field_category"

    # %s is filled with the field's description via lazy formatting.
    DEFAULT_VERBOSE_NAME = _("%s field definition")
    DEFAULT_VERBOSE_NAME_PLURAL = _("%s field definitions")

    # Class-level registries shared across every definition subclass.
    _base_definition = None
    _field_definitions = {}
    _proxies = {}
    _lookups = {}
def dbsafe_encode_aes(value, *args, **kwargs):
    """Pickle *value* with dbsafe_encode, then AES-encrypt the encoding.

    Extra args/kwargs are forwarded to dbsafe_encode unchanged.
    """
    encrypted = aes.base64_encrypt(dbsafe_encode(value, *args, **kwargs))
    return PickledObject(encrypted)
from polymodels.models import BasePolymorphicModel
from polymodels.utils import copy_fields

from ...db.fields import (FieldDefinitionTypeField, LazilyTranslatedField,
                          PythonIdentifierField)
from ...hacks import patch_model_option_verbose_name_raw
from ...utils import lazy_string_format, model_name, popattr
from ..ordered import OrderedModel
from ..model import ModelDefinitionAttribute
from .managers import FieldDefinitionChoiceManager, FieldDefinitionManager

# Applied at import time so Options.verbose_name_raw behaves as this
# package expects before any model classes are built.
patch_model_option_verbose_name_raw()

# Pickled sentinel for "no default provided", stored in the DB in place
# of django's models.NOT_PROVIDED class.
NOT_PROVIDED = dbsafe_encode(models.NOT_PROVIDED)


class FieldDefinitionBase(models.base.ModelBase):
    """
    Metaclass for field-definition models.

    Holds the attribute names used to declare a field definition's class,
    options, description and category, plus registries shared by all
    definitions. (Body may continue beyond this chunk.)
    """

    # Attribute names read off field-definition classes.
    FIELD_CLASS_ATTR = 'defined_field_class'
    FIELD_OPTIONS_ATTR = 'defined_field_options'
    FIELD_DESCRIPTION_ATTR = 'defined_field_description'
    FIELD_CATEGORY_ATTR = 'defined_field_category'

    # %s is filled with the field's description via lazy formatting.
    DEFAULT_VERBOSE_NAME = _("%s field definition")
    DEFAULT_VERBOSE_NAME_PLURAL = _("%s field definitions")

    # Class-level registries shared across every definition subclass.
    _base_definition = None
    _field_definitions = {}
    _proxies = {}
    _lookups = {}
def default_eligible_team_query():
    """Return the pickled default query: every Team is eligible."""
    all_teams = Team.objects.all()
    return dbsafe_encode(all_teams.query)
def default_eligible_person_query():
    """Return the pickled default query: every Person is eligible."""
    all_persons = Person.objects.all()
    return dbsafe_encode(all_persons.query)