def prep_for_deserialize(model, record, using, init_list=None):  # pylint:disable=unused-argument
    """
    Convert a record from SFDC (decoded JSON) to dict(model string, pk, fields)

    It fixes fields of some types. If names of required fields `init_list` are
    specified, then only these fields are processed.
    """
    # TODO the parameter 'using' is not currently important.
    attribs = record.pop('attributes')  # NOQA pylint:disable=unused-variable

    mod = model.__module__.split('.')
    if hasattr(model._meta, 'app_label'):
        app_label = getattr(model._meta, 'app_label')
    elif mod[-1] == 'models':
        app_label = mod[-2]
    else:
        raise ImproperlyConfigured("Can't discover the app_label for %s, "
                                   "you must specify it via model meta options." % model.__name__)

    if len(record.keys()) == 1 and model._meta.db_table in record:
        # this is for objects with ManyToManyField and OneToOneField
        while len(record) == 1:
            record = list(record.values())[0]
            if record is None:
                return None

    fields = prep_for_deserialize_inner(model, record, init_list=init_list)

    if init_list and set(init_list).difference(fields).difference([SF_PK]):
        raise DatabaseError("Some expected fields were not found")

    return dict(
        model='.'.join([app_label, model.__name__]),
        pk=record.pop('Id'),
        fields=fields,
    )
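
# A minimal usage sketch (not part of the original module): the dict returned by
# prep_for_deserialize() has the 'model'/'pk'/'fields' shape consumed by Django's
# Python deserializer, so a decoded SFDC record can be turned into a model
# instance. The connection alias 'salesforce' and the helper name below are
# assumptions made here for illustration only.
def _example_deserialize_record(model, record, using='salesforce'):
    from django.core.serializers import python as python_serializer

    prepared = prep_for_deserialize(model, record, using)
    if prepared is None:
        return None
    # Deserializer expects an iterable of dicts with 'model', 'pk' and 'fields'.
    return list(python_serializer.Deserializer([prepared]))[0].object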
def execute(self, q, args=()):
    """
    Send a query to the Salesforce API.
    """
    # pylint:disable=too-many-branches
    self.rowcount = None
    response = None
    if self.query is None:
        self.execute_select(q, args)
    else:
        response = self.execute_django(q, args)
        if isinstance(response, list):
            return

    # the encoding is detected automatically, e.g. from headers
    if response and response.text:
        # parse_float set to decimal.Decimal to avoid precision errors when
        # converting from the json number to a float and then to a Decimal object
        # on a model's DecimalField. This converts from json number directly
        # to a Decimal object
        data = response.json(parse_float=decimal.Decimal)
        if 'totalSize' in data:
            # a SELECT query
            self.rowcount = data['totalSize']
        elif 'success' in data and 'id' in data:
            # a successful INSERT query, return after getting PK
            self.lastrowid = data['id']
            return
        elif 'compositeResponse' in data:
            # TODO treat error reporting for composite requests
            self.lastrowid = [x['body']['id'] if x['body'] is not None else x['referenceId']
                              for x in data['compositeResponse']]
            return
        elif data['hasErrors'] is False:
            # it is from a Composite Batch request
            # save ids from bulk_create even if Django doesn't use them
            if data['results'] and data['results'][0]['result']:
                self.lastrowid = [item['result']['id'] for item in data['results']]
            return
        else:
            # something we don't recognize
            raise DatabaseError(data)

        if not q.upper().startswith('SELECT COUNT() FROM'):
            self.first_row = data['records'][0] if data['records'] else None
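
# A minimal usage sketch (not part of the original module): calling execute()
# directly through a DB-API style cursor. The connection alias 'salesforce' and
# the SOQL text are assumptions for illustration; in normal use Django calls
# execute() itself through the ORM.
def _example_raw_cursor_query():
    from django.db import connections

    cursor = connections['salesforce'].cursor()
    cursor.execute("SELECT Id, Name FROM Contact LIMIT 5")
    # rowcount was filled from 'totalSize' of the parsed JSON response above
    return cursor.rowcount, cursor.fetchall()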
def execute_django(self, soql, args=()):
    """
    Fixed execute for queries coming from Django query compilers
    """
    response = None
    sqltype = soql.split(None, 1)[0].upper()
    if isinstance(self.query, subqueries.InsertQuery):
        response = self.execute_insert(self.query)
    elif isinstance(self.query, subqueries.UpdateQuery):
        response = self.execute_update(self.query)
    elif isinstance(self.query, subqueries.DeleteQuery):
        response = self.execute_delete(self.query)
    elif isinstance(self.query, RawQuery):
        self.execute_select(soql, args)
    elif sqltype in ('SAVEPOINT', 'ROLLBACK', 'RELEASE'):
        log.info("Ignored SQL command '%s'", sqltype)
        return
    elif isinstance(self.query, Query):
        self.execute_select(soql, args)
    else:
        raise DatabaseError("Unsupported query: type %s: %s" % (type(self.query), self.query))
    return response
def as_sql(self, with_limits=True, with_col_aliases=False, subquery=False):  # pylint:disable=arguments-differ
    # the argument `subquery` is only for old Django 1.10
    # pylint:disable=too-many-locals,too-many-branches,too-many-statements
    """
    Creates the SQL for this query. Returns the SQL string and list of
    parameters.

    If 'with_limits' is False, any limit/offset information is not included
    in the query.
    """
    # After executing the query, we must get rid of any joins the query
    # setup created. So, take note of alias counts before the query ran.
    # However we do not want to get rid of stuff done in pre_sql_setup(),
    # as the pre_sql_setup will modify query state in a way that forbids
    # another run of it.
    if with_limits and self.query.low_mark == self.query.high_mark:
        return '', ()

    self.subquery = subquery
    refcounts_before = self.query.alias_refcount.copy()
    soql_trans = self.query_topology()
    try:
        extra_select, order_by, group_by = self.pre_sql_setup()
        if with_limits and self.query.low_mark == self.query.high_mark:
            return '', ()

        distinct_fields = self.get_distinct()

        # This must come after 'select', 'ordering', and 'distinct' -- see
        # docstring of get_from_clause() for details.
        from_, f_params = self.get_from_clause()

        where, w_params = self.compile(self.where) if self.where is not None else ("", [])
        having, h_params = self.compile(self.having) if self.having is not None else ("", [])
        params = []
        result = ['SELECT']

        if self.query.distinct:
            result.append(self.connection.ops.distinct_sql(distinct_fields))

        out_cols = []
        col_idx = 1
        for _, (s_sql, s_params), alias in self.select + extra_select:
            if alias:
                # fixed by removing 'AS'
                s_sql = '%s %s' % (s_sql, self.connection.ops.quote_name(alias))
            elif with_col_aliases and not isinstance(with_col_aliases,
                                                     salesforce.backend.base.DatabaseWrapper):
                s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx)
                col_idx += 1
            if soql_trans and re.match(r'^\w+\.\w+$', s_sql):
                tab_name, col_name = s_sql.split('.')
                s_sql = '%s.%s' % (soql_trans[tab_name], col_name)
            params.extend(s_params)
            out_cols.append(s_sql)

        result.append(', '.join(out_cols))

        result.append('FROM')
        result.extend(from_)
        params.extend(f_params)

        if where:
            result.append('WHERE %s' % where)
            params.extend(w_params)

        grouping = []
        for g_sql, g_params in group_by:
            grouping.append(g_sql)
            params.extend(g_params)
        if grouping:
            if distinct_fields:
                raise NotImplementedError(
                    "annotate() + distinct(fields) is not implemented.")
            if not order_by:
                order_by = self.connection.ops.force_no_ordering()
            result.append('GROUP BY %s' % ', '.join(grouping))

        if having:
            result.append('HAVING %s' % having)
            params.extend(h_params)

        if order_by:
            ordering = []
            for _, (o_sql, o_params, _) in order_by:
                ordering.append(o_sql)
                params.extend(o_params)
            result.append('ORDER BY %s' % ', '.join(ordering))

        if with_limits:
            if self.query.high_mark is not None:
                result.append('LIMIT %d' % (self.query.high_mark - self.query.low_mark))
            if self.query.low_mark:
                if self.query.high_mark is None:
                    val = self.connection.ops.no_limit_value()
                    if val:
                        result.append('LIMIT %d' % val)
                result.append('OFFSET %d' % self.query.low_mark)

        if self.query.select_for_update and self.connection.features.has_select_for_update:
            if self.connection.get_autocommit():
                raise TransactionManagementError(
                    "select_for_update cannot be used outside of a transaction.")

            # If we've been asked for a NOWAIT query but the backend does
            # not support it, raise a DatabaseError otherwise we could get
            # an unexpected deadlock.
            nowait = self.query.select_for_update_nowait
            if nowait and not self.connection.features.has_select_for_update_nowait:
                raise DatabaseError('NOWAIT is not supported on this database backend.')
            result.append(self.connection.ops.for_update_sql(nowait=nowait))

        return ' '.join(result), tuple(params)
    finally:
        # Finally do cleanup - get rid of the joins we created above.
        self.query.reset_refcounts(refcounts_before)
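
# A minimal usage sketch (not part of the original module): inspecting the SOQL
# produced by as_sql() for a queryset. The Contact model, its fields and the
# 'salesforce' connection alias are assumptions for illustration only.
def _example_compiled_soql():
    from example.models import Contact  # hypothetical model

    qs = Contact.objects.filter(last_name='Smith')[:10]
    soql, params = qs.query.get_compiler('salesforce').as_sql()
    # soql is roughly "SELECT Contact.Id, ... FROM Contact WHERE ... LIMIT 10"
    return soql, params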
def convert_lead(lead, converted_status=None, **kwargs):
    """
    Convert `lead` using the `convertLead()` endpoint exposed by the SOAP API.

    Parameters:
        `lead` -- a Lead object that has not been converted yet.
        `converted_status` -- valid LeadStatus value for a converted lead.
            Not necessary if only one converted status is configured for Leads.
        kwargs -- additional optional parameters according to the docs
            https://developer.salesforce.com/docs/atlas.en-us.api.meta/api/sforce_api_calls_convertlead.htm
            e.g. `accountId` if the Lead should be merged with an existing Account.

    Return value:
        {'accountId': ..., 'contactId': ..., 'leadId': ..., 'opportunityId': ..., 'success': ...}

    -- BEWARE --
    The current implementation won't work in case your `Contact`, `Account`
    or `Opportunity` objects have some custom **and** required fields. This
    arises from the fact that `convertLead()` is only meant to deal with
    standard Salesforce fields, so it does not really care about populating
    custom fields at insert time.

    One workaround is to map a custom required field in your `Lead` object to
    every custom required field in the target objects (i.e. `Contact`,
    `Opportunity` or `Account`). Follow the instructions at
    https://help.salesforce.com/apex/HTViewHelpDoc?id=customize_mapleads.htm
    for more details.
    """
    # pylint:disable=protected-access
    if not beatbox:
        raise InterfaceError(
            "To use convert_lead, you'll need to install the Beatbox library.")

    accepted_kw = set(('accountId', 'contactId', 'doNotCreateOpportunity',
                       'opportunityName', 'overwriteLeadSource', 'ownerId',
                       'sendNotificationEmail'))
    assert all(x in accepted_kw for x in kwargs)

    db_alias = lead._state.db
    if converted_status is None:
        converted_status = connections[db_alias].introspection.converted_lead_status

    soap_client = get_soap_client(db_alias)

    # convert
    kwargs['leadId'] = lead.pk
    kwargs['convertedStatus'] = converted_status
    response = soap_client.convertLead(kwargs)

    ret = dict((x._name[1], str(x)) for x in response)
    if "errors" in str(ret):
        raise DatabaseError(
            "The Lead conversion failed: {0}, leadId={1}".format(
                ret['errors'], ret['leadId']))
    return ret
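
# A minimal usage sketch (not part of the original module): a typical
# convert_lead() call. The Lead model, the 'Qualified' status value and the
# lookup email are assumptions for illustration only.
def _example_convert_lead():
    from example.models import Lead  # hypothetical model

    lead = Lead.objects.get(Email='jane.doe@example.com')
    # extra kwargs such as doNotCreateOpportunity or accountId could be passed
    # through to convertLead() as documented above
    result = convert_lead(lead, converted_status='Qualified')
    # e.g. {'accountId': '001...', 'contactId': '003...', 'leadId': '00Q...',
    #       'opportunityId': '006...', 'success': 'true'}
    return result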