def _copy_document_with_owner(doc, owner, uuids_map):
    """Reassign ownership of a serialized Document2 record to `owner`.

    Mutates and returns `doc` (a deserialized fixture dict with 'pk' and
    'fields' keys): resets pk/version, remaps the document UUID (including
    any UUID embedded in the JSON 'data' payload), reparents the document,
    and rewrites dependency references through `uuids_map`.

    `uuids_map` maps old UUIDs to their replacements; entries still set to
    None get a fresh UUID on first use, so documents imported together keep
    pointing at each other.
    """
    home_dir = Directory.objects.get_home_directory(owner)

    doc['fields']['owner'] = [owner.username]
    doc['pk'] = None
    doc['fields']['version'] = 1

    # Reuse the replacement UUID if one was already assigned in this batch,
    # otherwise mint a new one and record it. The document's own UUID is
    # expected to be pre-registered as a key of uuids_map — TODO confirm
    # against the caller that builds the map.
    old_uuid = doc['fields']['uuid']
    if uuids_map[old_uuid] is None:
        uuids_map[old_uuid] = uuid_default()
    doc['fields']['uuid'] = uuids_map[old_uuid]

    # Keep the UUID embedded in the JSON 'data' payload in sync with the
    # remapped document UUID; only re-serialize when something changed.
    if 'data' in doc['fields']:
        data = json.loads(doc['fields']['data'])
        if 'uuid' in data:
            data['uuid'] = uuids_map[old_uuid]
            doc['fields']['data'] = json.dumps(data)

    # Remap the parent directory; fall back to the owner's home directory
    # when the referenced parent is not part of this import.
    parent_uuid = None
    if 'parent_directory' in doc['fields']:
        parent_uuid = doc['fields']['parent_directory'][0]

    if parent_uuid is not None and parent_uuid in uuids_map:
        if uuids_map[parent_uuid] is None:
            uuids_map[parent_uuid] = uuid_default()
        doc['fields']['parent_directory'] = [uuids_map[parent_uuid], 1, False]
    else:
        if parent_uuid is not None:
            LOG.warning('Could not find parent directory with UUID: %s in JSON import, will set parent to home directory' % parent_uuid)
        doc['fields']['parent_directory'] = [home_dir.uuid, home_dir.version, home_dir.is_history]

    # Remap dependency references known to this import; unknown ones are
    # left untouched but logged, since they may break referential integrity.
    for idx, (dep_uuid, _dep_version, _dep_is_history) in enumerate(doc['fields']['dependencies']):
        if dep_uuid not in uuids_map:
            LOG.warning('Could not find dependency UUID: %s in JSON import, may cause integrity errors if not found.' % dep_uuid)
        else:
            if uuids_map[dep_uuid] is None:
                uuids_map[dep_uuid] = uuid_default()
            doc['fields']['dependencies'][idx][0] = uuids_map[dep_uuid]

    return doc
def _copy_document_with_owner(doc, owner, uuids_map):
    """Rewrite an exported document dict so that it belongs to `owner`.

    Clears pk/version, swaps every known UUID (document, embedded data,
    parent directory, dependencies) for its replacement from `uuids_map` —
    minting replacements lazily — and returns the mutated dict.
    """
    def _mapped(uuid_key):
        # Assign a fresh UUID the first time a key from the map is used.
        if uuids_map[uuid_key] is None:
            uuids_map[uuid_key] = uuid_default()
        return uuids_map[uuid_key]

    home_dir = Directory.objects.get_home_directory(owner)
    fields = doc["fields"]

    fields["owner"] = [owner.username]
    doc["pk"] = None
    fields["version"] = 1

    old_uuid = fields["uuid"]
    fields["uuid"] = _mapped(old_uuid)

    # Mirror the new UUID into the serialized JSON payload when present.
    if "data" in fields:
        payload = json.loads(fields["data"])
        if "uuid" in payload:
            payload["uuid"] = uuids_map[old_uuid]
            fields["data"] = json.dumps(payload)

    parent_uuid = fields["parent_directory"][0] if "parent_directory" in fields else None

    if parent_uuid is not None and parent_uuid in uuids_map:
        fields["parent_directory"] = [_mapped(parent_uuid), 1, False]
    else:
        if parent_uuid is not None:
            LOG.warn("Could not find parent directory with UUID: %s in JSON import, will set parent to home directory" % parent_uuid)
        fields["parent_directory"] = [home_dir.uuid, home_dir.version, home_dir.is_history]

    # Dependencies outside the map are logged and left as-is.
    for position, (dep_uuid, _version, _is_history) in enumerate(fields["dependencies"]):
        if dep_uuid not in uuids_map:
            LOG.warn("Could not find dependency UUID: %s in JSON import, may cause integrity errors if not found." % dep_uuid)
        else:
            fields["dependencies"][position][0] = _mapped(dep_uuid)

    return doc
def _copy_document_with_owner(doc, owner, uuids_map):
    """Reassign ownership of a serialized Document2 record to `owner`.

    Mutates and returns `doc` (a deserialized fixture dict with 'pk' and
    'fields' keys): resets pk/version, remaps the document UUID, reparents
    the document, and rewrites dependency references through `uuids_map`.

    `uuids_map` maps old UUIDs to their replacements; entries still set to
    None get a fresh UUID on first use, so documents imported together keep
    pointing at each other.
    """
    home_dir = Directory.objects.get_home_directory(owner)

    doc['fields']['owner'] = [owner.username]
    doc['pk'] = None
    doc['fields']['version'] = 1

    # Reuse the replacement UUID if one was already assigned in this batch,
    # otherwise mint a new one and record it. The document's own UUID is
    # expected to be pre-registered as a key of uuids_map — TODO confirm
    # against the caller that builds the map.
    old_uuid = doc['fields']['uuid']
    if uuids_map[old_uuid] is None:
        uuids_map[old_uuid] = uuid_default()
    doc['fields']['uuid'] = uuids_map[old_uuid]

    # Remap the parent directory; fall back to the owner's home directory
    # when the referenced parent is not part of this import.
    parent_uuid = None
    if 'parent_directory' in doc['fields']:
        parent_uuid = doc['fields']['parent_directory'][0]

    if parent_uuid is not None and parent_uuid in uuids_map:
        if uuids_map[parent_uuid] is None:
            uuids_map[parent_uuid] = uuid_default()
        doc['fields']['parent_directory'] = [uuids_map[parent_uuid], 1, False]
    else:
        if parent_uuid is not None:
            LOG.warning('Could not find parent directory with UUID: %s in JSON import, will set parent to home directory' % parent_uuid)
        doc['fields']['parent_directory'] = [home_dir.uuid, home_dir.version, home_dir.is_history]

    # Remap dependency references known to this import; unknown ones are
    # left untouched but logged, since they may break referential integrity.
    for idx, (dep_uuid, _dep_version, _dep_is_history) in enumerate(doc['fields']['dependencies']):
        if dep_uuid not in uuids_map:
            LOG.warning('Could not find dependency UUID: %s in JSON import, may cause integrity errors if not found.' % dep_uuid)
        else:
            if uuids_map[dep_uuid] is None:
                uuids_map[dep_uuid] = uuid_default()
            doc['fields']['dependencies'][idx][0] = uuids_map[dep_uuid]

    return doc
def test_upload(self):
    """Upload a small Hive query workload to the optimizer API and poll
    (up to ~60s) until the upload reports FINISHED with all 7 queries
    accepted."""
    queries = [
        (uuid_default(), 0, "select emps.id from emps where emps.name = 'Joe' group by emps.mgr, emps.id;", 'default'),
        (uuid_default(), 0, "select emps.name from emps where emps.num = 007 group by emps.state, emps.name;", 'default'),
        (uuid_default(), 0, "select Part.partkey, max(Part.salary), Part.name, Part.type from db1.Part where Part.yyprice > 2095", 'db1'),
        (uuid_default(), 0, "select Part.partkey, Part.name, Part.mfgr FROM Part WHERE Part.name LIKE '%red';", 'default'),
        (uuid_default(), 0, "select count(*) as loans from account a where a.account_state_id in (5,9);", 'default'),
        (uuid_default(), 0, "select orders.key, orders.id from orders where orders.price < 9999", 'default'),
        (uuid_default(), 0, "select mgr.name from mgr where mgr.reports > 10 group by mgr.state;", 'default'),
    ]

    resp = self.api.upload(data=queries, data_type='queries', source_platform='hive')

    # The upload response carries a status envelope plus a count.
    for key in ('status', 'count'):
        assert_true(key in resp, resp)
    for key in ('state', 'workloadId', 'failedQueries', 'successQueries'):
        assert_true(key in resp['status'], resp)
    assert_true(resp['status']['state'] in ('WAITING', 'FINISHED', 'FAILED'), resp['status']['state'])

    resp = self.api.upload_status(workload_id=resp['status']['workloadId'])
    assert_true('status' in resp, resp)
    for key in ('state', 'workloadId'):
        assert_true(key in resp['status'], resp)

    # Poll once per second until a terminal state or the 60-try budget runs out.
    attempts = 0
    while attempts < 60 and resp['status']['state'] not in ('FINISHED', 'FAILED'):
        resp = self.api.upload_status(workload_id=resp['status']['workloadId'])
        attempts += 1
        time.sleep(1)
        LOG.info('Upload state: %(state)s' % resp['status'])

    assert_true(attempts < 60 and resp['status']['state'] == 'FINISHED', resp)
    assert_equal(resp['status']['successQueries'], 7, resp)
def import_documents(request):
    """Import a list of serialized Document2 objects for the request user.

    Accepts the export either as an uploaded 'documents' file or as a
    JSON-encoded 'documents' POST field. Documents owned by another user
    are imported as brand-new copies (fresh pk/uuid, reparented under the
    user's home directory); documents the user already owns update the
    existing record when one with the same UUID exists.

    Raises PopupException when a document's dependency cannot be found.
    """
    if request.FILES.get('documents'):
        documents = request.FILES['documents'].read()
    else:
        # The POST field holds a JSON string which itself contains the JSON
        # document list, hence the second loads() below.
        documents = json.loads(request.POST.get('documents'))

    documents = json.loads(documents)
    docs = []

    home_dir = Directory.objects.get_home_directory(request.user)

    for doc in documents:
        # If doc is not owned by current user, make a copy of the document
        if doc['fields']['owner'][0] != request.user.username:
            doc['fields']['owner'] = [request.user.username]
            doc['pk'] = None
            doc['fields']['version'] = 1
            doc['fields']['uuid'] = uuid_default()
            doc['fields']['parent_directory'] = [home_dir.uuid, home_dir.version, home_dir.is_history]
        else:
            # Update existing doc or create new
            try:
                existing_doc = Document2.objects.get_by_uuid(doc['fields']['uuid'], owner=request.user)
                doc['pk'] = existing_doc.pk
            except FilesystemException:
                # No matching document for this user yet: import as new.
                LOG.warning('Could not find document with UUID: %s, will create a new document on import.', doc['fields']['uuid'])
                doc['pk'] = None
                doc['fields']['version'] = 1

        # Verify that parent exists, log warning and reparent to the home
        # directory if not found.
        if doc['fields']['parent_directory']:
            parent_uuid, parent_version, parent_is_history = doc['fields']['parent_directory']
            if not Document2.objects.filter(uuid=parent_uuid, version=parent_version, is_history=parent_is_history).exists():
                LOG.warning('Could not find parent document with UUID: %s, will set parent to home directory' % parent_uuid)
                doc['fields']['parent_directory'] = [home_dir.uuid, home_dir.version, home_dir.is_history]

        # Verify that dependencies exist; a missing dependency aborts the
        # whole import with a critical error.
        if doc['fields']['dependencies']:
            for dep_uuid, dep_version, dep_is_history in doc['fields']['dependencies']:
                if not Document2.objects.filter(uuid=dep_uuid, version=dep_version, is_history=dep_is_history).exists():
                    raise PopupException(_('Cannot import document, dependency with UUID: %s not found.') % dep_uuid)

        # Stamp the import time (second precision) as last modification.
        doc['fields']['last_modified'] = datetime.now().replace(microsecond=0).isoformat()

        docs.append(doc)
def import_documents(request): if request.FILES.get('documents'): documents = request.FILES['documents'].read() else: documents = json.loads(request.POST.get('documents')) documents = json.loads(documents) docs = [] home_dir = Directory.objects.get_home_directory(request.user) for doc in documents: # If doc is not owned by current user, make a copy of the document if doc['fields']['owner'][0] != request.user.username: doc['fields']['owner'] = [request.user.username] doc['pk'] = None doc['fields']['version'] = 1 doc['fields']['uuid'] = uuid_default() doc['fields']['parent_directory'] = [ home_dir.uuid, home_dir.version, home_dir.is_history ] else: # Update existing doc or create new try: existing_doc = Document2.objects.get_by_uuid( doc['fields']['uuid'], owner=request.user) doc['pk'] = existing_doc.pk except FilesystemException, e: LOG.warn( 'Could not find document with UUID: %s, will create a new document on import.', doc['fields']['uuid']) doc['pk'] = None doc['fields']['version'] = 1 # Verify that parent exists, log warning and nullify parent if not found if doc['fields']['parent_directory']: uuid, version, is_history = doc['fields']['parent_directory'] if not Document2.objects.filter( uuid=uuid, version=version, is_history=is_history).exists(): LOG.warn( 'Could not find parent document with UUID: %s, will set parent to home directory' % uuid) doc['fields']['parent_directory'] = [ home_dir.uuid, home_dir.version, home_dir.is_history ] # Verify that dependencies exist, raise critical error if any dependency not found if doc['fields']['dependencies']: for uuid, version, is_history in doc['fields']['dependencies']: if not Document2.objects.filter( uuid=uuid, version=version, is_history=is_history).exists(): raise PopupException( _('Cannot import document, dependency with UUID: %s not found.' ) % uuid) # Set last modified date to now doc['fields']['last_modified'] = datetime.now().replace( microsecond=0).isoformat() docs.append(doc)
def upload(cls):
    """Upload a fixed workload (7 queries plus one CREATE TABLE DDL) to
    the optimizer API and poll (up to ~60s) until processing finishes,
    asserting that all 8 statements are accepted."""
    db = BaseTestOptimizerApi.DATABASE

    ddl = ' '.join('''CREATE TABLE `web_logs`( `_version_` bigint, `app` string COMMENT 'app', `bytes` smallint COMMENT 'http://demo.gethue.com/ is', `city` string COMMENT 'city', `client_ip` string, `code` tinyint, `country_code` string, `country_code3` string, `country_name` string, `device_family` string, `extension` string, `latitude` float, `longitude` float, `method` string, `os_family` string, `os_major` string, `protocol` string, `record` string, `referer` string, `region_code` bigint, `request` string, `subapp` string, `time` string, `url` string, `user_agent` string, `user_agent_family` string, `user_agent_major` string, `id` string) COMMENT 'http://demo.gethue.com/ rocks!' 
PARTITIONED BY ( `date` string) '''.splitlines())

    queries = [
        (uuid_default(), 0, "select emps.id from emps where emps.name = 'Joe' group by emps.mgr, emps.id;", db),
        (uuid_default(), 0, "select emps.name from emps where emps.num = 007 group by emps.state, emps.name;", db),
        (uuid_default(), 0, "select Part.partkey, max(Part.salary), Part.name, Part.type from %s.Part where Part.yyprice > 2095" % db, db),
        (uuid_default(), 0, "select Part.partkey, Part.name, Part.mfgr FROM Part WHERE Part.name LIKE '%red';", db),
        (uuid_default(), 0, "select count(*) as loans from account a where a.account_state_id in (5,9);", db),
        (uuid_default(), 0, "select orders.key, orders.id from orders where orders.price < 9999", db),
        (uuid_default(), 0, "select x from x join y where x.a = y.a;", db),
        # DDL
        (uuid_default(), 0, ddl, db),
    ]

    response = cls.api.upload(data=queries, data_type='queries', source_platform='hive')

    # The upload response carries a status envelope plus a count.
    assert_true('status' in response, response)
    assert_true('count' in response, response)
    for key in ('state', 'workloadId', 'failedQueries', 'successQueries'):
        assert_true(key in response['status'], response)
    assert_true(response['status']['state'] in ('WAITING', 'FINISHED', 'FAILED'), response['status']['state'])

    response = cls.api.upload_status(workload_id=response['status']['workloadId'])
    assert_true('status' in response, response)
    for key in ('state', 'workloadId'):
        assert_true(key in response['status'], response)

    # Poll once per second until a terminal state or the 60-try budget runs out.
    polls = 0
    while polls < 60 and response['status']['state'] not in ('FINISHED', 'FAILED'):
        response = cls.api.upload_status(workload_id=response['status']['workloadId'])
        polls += 1
        time.sleep(1)
        LOG.info('Upload state: %(state)s' % response['status'])

    assert_true(polls < 60 and response['status']['state'] == 'FINISHED', response)
    assert_equal(response['status']['successQueries'], 8, response)
for doc in documents: # If doc is not owned by current user, make a copy of the document if doc['fields']['owner'][0] != request.user.username: doc['fields']['owner'] = [request.user.username] <<<<<<< HEAD owner = doc['fields']['owner'][0] # TODO: Check if this should be replaced by get_by_uuid if Document2.objects.filter(uuid=doc['fields']['uuid'], owner__username=owner).exists(): doc['pk'] = Document2.objects.get(uuid=doc['fields']['uuid'], owner__username=owner).pk else: ======= >>>>>>> upstream/master doc['pk'] = None doc['fields']['version'] = 1 doc['fields']['uuid'] = uuid_default() doc['fields']['parent_directory'] = [home_dir.uuid, home_dir.version, home_dir.is_history] else: # Update existing doc or create new try: existing_doc = Document2.objects.get_by_uuid(doc['fields']['uuid'], owner=request.user) doc['pk'] = existing_doc.pk except FilesystemException, e: LOG.warn('Could not find document with UUID: %s, will create a new document on import.', doc['fields']['uuid']) doc['pk'] = None doc['fields']['version'] = 1 # Verify that parent exists, log warning and nullify parent if not found if doc['fields']['parent_directory']: uuid, version, is_history = doc['fields']['parent_directory'] if not Document2.objects.filter(uuid=uuid, version=version, is_history=is_history).exists(): LOG.warn('Could not find parent document with UUID: %s, will set parent to home directory' % uuid)