summaryrefslogtreecommitdiff
path: root/django/db/models/sql
diff options
context:
space:
mode:
authorAnssi Kääriäinen <akaariai@gmail.com>2014-01-18 11:09:43 +0200
committerAnssi Kääriäinen <akaariai@gmail.com>2014-01-18 11:46:19 +0200
commit20bab2cf9d02a5c6477d8aac066a635986e0d3f3 (patch)
treee1871348fba181365f89963ab934deaed79f996f /django/db/models/sql
parentb87c59b04bc549a5ba42023d04e4be7a4737f7d9 (diff)
Fixed #16187 -- refactored ORM lookup system
Allowed users to specify which lookups or transforms ("nested lookups") are available for fields. The implementation is now class based. Squashed commit of the following: commit fa7a7195f1952a9c8dea7f6e89ee13f81757eda7 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Jan 18 10:53:24 2014 +0200 Added lookup registration API docs commit eb1c8ce164325e0d8641f14202e12486c70efdb6 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Tue Jan 14 18:59:36 2014 +0200 Release notes and other minor docs changes commit 11501c29c9352d17f22f3a0f59d3b805913dedcc Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sun Jan 12 20:53:03 2014 +0200 Forgot to add custom_lookups tests in prev commit commit 83173b960ea7eb2b24d573f326be59948df33536 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sun Jan 12 19:59:12 2014 +0200 Renamed Extract -> Transform commit 3b18d9f3a1bcdd93280f79654eba0efa209377bd Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sun Jan 12 19:51:53 2014 +0200 Removed suggestion of temporary lookup registration from docs commit 21d0c7631c161fc0c67911480be5d3f13f1afa68 Merge: 2509006 f2dc442 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sun Jan 12 09:38:23 2014 -0800 Merge pull request #2 from mjtamlyn/lookups_3 Reworked custom lookups docs. commit f2dc4429a1da04c858364972eea57a35a868dab4 Author: Marc Tamlyn <marc.tamlyn@gmail.com> Date: Sun Jan 12 13:15:05 2014 +0000 Reworked custom lookups docs. Mostly just formatting and rewording, but also replaced the example using ``YearExtract`` to use an example which is unlikely to ever be possible directly in the ORM. 
commit 250900650628d1f11beadb22814abd666029fb81 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sun Jan 12 13:19:13 2014 +0200 Removed unused import commit 4fba5dfaa022653ffa72497258ffd8f8b7476f92 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Jan 11 22:34:41 2014 +0200 Added docs to index commit 6d53963f375c77a1f287833b19b976d23f36c30b Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Jan 11 22:10:24 2014 +0200 Dead code removal commit f9cc0390078e21f1ea5a7bc1f15b09f8f6b0904d Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Jan 11 19:00:43 2014 +0200 A new try for docs commit 33aa18a6e3c831930bda0028222a26f9c1d96e66 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Jan 11 14:57:12 2014 +0200 Renamed get_cols to get_group_by_cols commit c7d5f8661b7d364962bed2e6f81161c1b4f1bcc3 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Jan 11 14:45:53 2014 +0200 Altered query string customization for backends vendors The new way is trying to call first method 'as_' + connection.vendor. If that doesn't exist, then call as_sql(). Also altered how lookup registration is done. There is now RegisterLookupMixin class that is used by Field, Extract and sql.Aggregate. This allows one to register lookups for extracts and aggregates in the same way lookup registration is done for fields. 
commit 90e7004ec14e15503f828cc9bde2a7dab593814d Merge: 66649ff f7c2c0a Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Jan 11 13:21:01 2014 +0200 Merge branch 'master' into lookups_3 commit 66649ff891c7c73c7eecf6038c9a6802611b5d8a Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Jan 11 13:16:01 2014 +0200 Some rewording in docs commit 31b8faa62714b4b6b6057a9f5cc106c4dd73caab Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sun Dec 29 15:52:29 2013 +0200 Cleanup based on review comments commit 1016159f34674c0df871ed891cde72be8340bb5d Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Dec 28 18:37:04 2013 +0200 Proof-of-concept fix for #16731 Implemented only for SQLite and PostgreSQL, and only for startswith and istartswith lookups. commit 193cd097ca8f2cc6a911e57b8e3fb726f96ee6a6 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Dec 28 17:57:58 2013 +0200 Fixed #11722 -- iexact=F() produced invalid SQL commit 08ed3c3b49e100ed9019831e770c25c8f61b70f9 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Dec 21 23:59:52 2013 +0200 Made Lookup and Extract available from django.db.models commit b99c8d83c972786c6fcd0e84c9e5cb08c1368300 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Dec 21 23:06:29 2013 +0200 Fixed review notes by Loic commit 049eebc0703c151127f4f0265beceea7b8b39e72 Merge: ed8fab7 b80a835 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Dec 21 22:53:10 2013 +0200 Merge branch 'master' into lookups_3 Conflicts: django/db/models/fields/__init__.py django/db/models/sql/compiler.py django/db/models/sql/query.py tests/null_queries/tests.py commit ed8fab7fe8867ff3eb801c3697a426478387bb2f Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Dec 21 22:47:23 2013 +0200 Made Extracts aware of full lookup path commit 27a57b7aed91b2f346abc4a77da838bffa17c727 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sun Dec 1 21:10:11 2013 +0200 Removed debugger import commit 074e0f5aca0572e368c11e6d2c73c9026e7d63d7 Author: 
Anssi Kääriäinen <akaariai@gmail.com> Date: Sun Dec 1 21:02:16 2013 +0200 GIS lookup support added commit 760e28e72bae475b442b026650969b0d182dbe53 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sun Dec 1 20:04:31 2013 +0200 Removed usage of Constraint, used Lookup instead commit eac47766844b90e7d3269e7a8c012eee34ec0093 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sun Dec 1 02:22:30 2013 +0200 Minor cleanup of Lookup API commit 2adf50428d59a783078b0da3d5d035106640c899 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sun Dec 1 02:14:19 2013 +0200 Added documentation, polished implementation commit 32c04357a87e3727a34f8c5e6ec0114d1fbbb303 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Nov 30 23:10:15 2013 +0200 Avoid OrderedDict creation on lookup aggregate check commit 7c8b3a32cc17b4dbca160921d48125f1631e0df4 Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Sat Nov 30 23:04:34 2013 +0200 Implemented nested lookups But there is no support of using lookups outside filtering yet. commit 4d219d4cdef21d9c14e5d6b9299d583d1975fcba Author: Anssi Kääriäinen <akaariai@gmail.com> Date: Wed Nov 27 22:07:30 2013 +0200 Initial implementation of custom lookups
Diffstat (limited to 'django/db/models/sql')
-rw-r--r--django/db/models/sql/aggregates.py10
-rw-r--r--django/db/models/sql/compiler.py63
-rw-r--r--django/db/models/sql/datastructures.py23
-rw-r--r--django/db/models/sql/expressions.py6
-rw-r--r--django/db/models/sql/query.py179
-rw-r--r--django/db/models/sql/subqueries.py20
-rw-r--r--django/db/models/sql/where.py27
7 files changed, 203 insertions, 125 deletions
diff --git a/django/db/models/sql/aggregates.py b/django/db/models/sql/aggregates.py
index 8542a330c6..aef8b493bb 100644
--- a/django/db/models/sql/aggregates.py
+++ b/django/db/models/sql/aggregates.py
@@ -4,6 +4,7 @@ Classes to represent the default SQL aggregate functions
import copy
from django.db.models.fields import IntegerField, FloatField
+from django.db.models.lookups import RegisterLookupMixin
__all__ = ['Aggregate', 'Avg', 'Count', 'Max', 'Min', 'StdDev', 'Sum', 'Variance']
@@ -14,7 +15,7 @@ ordinal_aggregate_field = IntegerField()
computed_aggregate_field = FloatField()
-class Aggregate(object):
+class Aggregate(RegisterLookupMixin):
"""
Default SQL Aggregate.
"""
@@ -93,6 +94,13 @@ class Aggregate(object):
return self.sql_template % substitutions, params
+ def get_group_by_cols(self):
+ return []
+
+ @property
+ def output_type(self):
+ return self.field
+
class Avg(Aggregate):
is_computed = True
diff --git a/django/db/models/sql/compiler.py b/django/db/models/sql/compiler.py
index 41bba93206..123427cf8b 100644
--- a/django/db/models/sql/compiler.py
+++ b/django/db/models/sql/compiler.py
@@ -45,7 +45,7 @@ class SQLCompiler(object):
if self.query.select_related and not self.query.related_select_cols:
self.fill_related_selections()
- def quote_name_unless_alias(self, name):
+ def __call__(self, name):
"""
A wrapper around connection.ops.quote_name that doesn't quote aliases
for table names. This avoids problems with some SQL dialects that treat
@@ -61,6 +61,22 @@ class SQLCompiler(object):
self.quote_cache[name] = r
return r
+ def quote_name_unless_alias(self, name):
+ """
+ A wrapper around connection.ops.quote_name that doesn't quote aliases
+ for table names. This avoids problems with some SQL dialects that treat
+ quoted strings specially (e.g. PostgreSQL).
+ """
+ return self(name)
+
+ def compile(self, node):
+ vendor_impl = getattr(
+ node, 'as_' + self.connection.vendor, None)
+ if vendor_impl:
+ return vendor_impl(self, self.connection)
+ else:
+ return node.as_sql(self, self.connection)
+
def as_sql(self, with_limits=True, with_col_aliases=False):
"""
Creates the SQL for this query. Returns the SQL string and list of
@@ -88,11 +104,9 @@ class SQLCompiler(object):
# docstring of get_from_clause() for details.
from_, f_params = self.get_from_clause()
- qn = self.quote_name_unless_alias
-
- where, w_params = self.query.where.as_sql(qn=qn, connection=self.connection)
- having, h_params = self.query.having.as_sql(qn=qn, connection=self.connection)
- having_group_by = self.query.having.get_cols()
+ where, w_params = self.compile(self.query.where)
+ having, h_params = self.compile(self.query.having)
+ having_group_by = self.query.having.get_group_by_cols()
params = []
for val in six.itervalues(self.query.extra_select):
params.extend(val[1])
@@ -180,7 +194,7 @@ class SQLCompiler(object):
(without the table names) are given unique aliases. This is needed in
some cases to avoid ambiguity with nested queries.
"""
- qn = self.quote_name_unless_alias
+ qn = self
qn2 = self.connection.ops.quote_name
result = ['(%s) AS %s' % (col[0], qn2(alias)) for alias, col in six.iteritems(self.query.extra_select)]
params = []
@@ -213,7 +227,7 @@ class SQLCompiler(object):
aliases.add(r)
col_aliases.add(col[1])
else:
- col_sql, col_params = col.as_sql(qn, self.connection)
+ col_sql, col_params = self.compile(col)
result.append(col_sql)
params.extend(col_params)
@@ -229,7 +243,7 @@ class SQLCompiler(object):
max_name_length = self.connection.ops.max_name_length()
for alias, aggregate in self.query.aggregate_select.items():
- agg_sql, agg_params = aggregate.as_sql(qn, self.connection)
+ agg_sql, agg_params = self.compile(aggregate)
if alias is None:
result.append(agg_sql)
else:
@@ -267,7 +281,7 @@ class SQLCompiler(object):
result = []
if opts is None:
opts = self.query.get_meta()
- qn = self.quote_name_unless_alias
+ qn = self
qn2 = self.connection.ops.quote_name
aliases = set()
only_load = self.deferred_to_columns()
@@ -319,7 +333,7 @@ class SQLCompiler(object):
Note that this method can alter the tables in the query, and thus it
must be called before get_from_clause().
"""
- qn = self.quote_name_unless_alias
+ qn = self
qn2 = self.connection.ops.quote_name
result = []
opts = self.query.get_meta()
@@ -352,7 +366,7 @@ class SQLCompiler(object):
ordering = (self.query.order_by
or self.query.get_meta().ordering
or [])
- qn = self.quote_name_unless_alias
+ qn = self
qn2 = self.connection.ops.quote_name
distinct = self.query.distinct
select_aliases = self._select_aliases
@@ -490,7 +504,7 @@ class SQLCompiler(object):
ordering and distinct must be done first.
"""
result = []
- qn = self.quote_name_unless_alias
+ qn = self
qn2 = self.connection.ops.quote_name
first = True
from_params = []
@@ -508,8 +522,7 @@ class SQLCompiler(object):
extra_cond = join_field.get_extra_restriction(
self.query.where_class, alias, lhs)
if extra_cond:
- extra_sql, extra_params = extra_cond.as_sql(
- qn, self.connection)
+ extra_sql, extra_params = self.compile(extra_cond)
extra_sql = 'AND (%s)' % extra_sql
from_params.extend(extra_params)
else:
@@ -541,7 +554,7 @@ class SQLCompiler(object):
"""
Returns a tuple representing the SQL elements in the "group by" clause.
"""
- qn = self.quote_name_unless_alias
+ qn = self
result, params = [], []
if self.query.group_by is not None:
select_cols = self.query.select + self.query.related_select_cols
@@ -560,7 +573,7 @@ class SQLCompiler(object):
if isinstance(col, (list, tuple)):
sql = '%s.%s' % (qn(col[0]), qn(col[1]))
elif hasattr(col, 'as_sql'):
- sql, col_params = col.as_sql(qn, self.connection)
+ self.compile(col)
else:
sql = '(%s)' % str(col)
if sql not in seen:
@@ -784,7 +797,7 @@ class SQLCompiler(object):
return result
def as_subquery_condition(self, alias, columns, qn):
- inner_qn = self.quote_name_unless_alias
+ inner_qn = self
qn2 = self.connection.ops.quote_name
if len(columns) == 1:
sql, params = self.as_sql()
@@ -895,9 +908,9 @@ class SQLDeleteCompiler(SQLCompiler):
"""
assert len(self.query.tables) == 1, \
"Can only delete from one table at a time."
- qn = self.quote_name_unless_alias
+ qn = self
result = ['DELETE FROM %s' % qn(self.query.tables[0])]
- where, params = self.query.where.as_sql(qn=qn, connection=self.connection)
+ where, params = self.compile(self.query.where)
if where:
result.append('WHERE %s' % where)
return ' '.join(result), tuple(params)
@@ -913,7 +926,7 @@ class SQLUpdateCompiler(SQLCompiler):
if not self.query.values:
return '', ()
table = self.query.tables[0]
- qn = self.quote_name_unless_alias
+ qn = self
result = ['UPDATE %s' % qn(table)]
result.append('SET')
values, update_params = [], []
@@ -933,7 +946,7 @@ class SQLUpdateCompiler(SQLCompiler):
val = SQLEvaluator(val, self.query, allow_joins=False)
name = field.column
if hasattr(val, 'as_sql'):
- sql, params = val.as_sql(qn, self.connection)
+ sql, params = self.compile(val)
values.append('%s = %s' % (qn(name), sql))
update_params.extend(params)
elif val is not None:
@@ -944,7 +957,7 @@ class SQLUpdateCompiler(SQLCompiler):
if not values:
return '', ()
result.append(', '.join(values))
- where, params = self.query.where.as_sql(qn=qn, connection=self.connection)
+ where, params = self.compile(self.query.where)
if where:
result.append('WHERE %s' % where)
return ' '.join(result), tuple(update_params + params)
@@ -1024,11 +1037,11 @@ class SQLAggregateCompiler(SQLCompiler):
parameters.
"""
if qn is None:
- qn = self.quote_name_unless_alias
+ qn = self
sql, params = [], []
for aggregate in self.query.aggregate_select.values():
- agg_sql, agg_params = aggregate.as_sql(qn, self.connection)
+ agg_sql, agg_params = self.compile(aggregate)
sql.append(agg_sql)
params.extend(agg_params)
sql = ', '.join(sql)
diff --git a/django/db/models/sql/datastructures.py b/django/db/models/sql/datastructures.py
index f45ecaf76d..421c3cd860 100644
--- a/django/db/models/sql/datastructures.py
+++ b/django/db/models/sql/datastructures.py
@@ -5,18 +5,27 @@ the SQL domain.
class Col(object):
- def __init__(self, alias, col):
- self.alias = alias
- self.col = col
+ def __init__(self, alias, target, source):
+ self.alias, self.target, self.source = alias, target, source
def as_sql(self, qn, connection):
- return '%s.%s' % (qn(self.alias), self.col), []
+ return "%s.%s" % (qn(self.alias), qn(self.target.column)), []
- def prepare(self):
- return self
+ @property
+ def output_type(self):
+ return self.source
def relabeled_clone(self, relabels):
- return self.__class__(relabels.get(self.alias, self.alias), self.col)
+ return self.__class__(relabels.get(self.alias, self.alias), self.target, self.source)
+
+ def get_group_by_cols(self):
+ return [(self.alias, self.target.column)]
+
+ def get_lookup(self, name):
+ return self.output_type.get_lookup(name)
+
+ def prepare(self):
+ return self
class EmptyResultSet(Exception):
diff --git a/django/db/models/sql/expressions.py b/django/db/models/sql/expressions.py
index 9f29e2ace5..e31eaa8a2f 100644
--- a/django/db/models/sql/expressions.py
+++ b/django/db/models/sql/expressions.py
@@ -24,11 +24,11 @@ class SQLEvaluator(object):
(change_map.get(col[0], col[0]), col[1])))
return clone
- def get_cols(self):
+ def get_group_by_cols(self):
cols = []
for node, col in self.cols:
- if hasattr(node, 'get_cols'):
- cols.extend(node.get_cols())
+ if hasattr(node, 'get_group_by_cols'):
+ cols.extend(node.get_group_by_cols())
elif isinstance(col, tuple):
cols.append(col)
return cols
diff --git a/django/db/models/sql/query.py b/django/db/models/sql/query.py
index c3c8e55793..db4e6744bf 100644
--- a/django/db/models/sql/query.py
+++ b/django/db/models/sql/query.py
@@ -19,6 +19,7 @@ from django.db.models.constants import LOOKUP_SEP
from django.db.models.aggregates import refs_aggregate
from django.db.models.expressions import ExpressionNode
from django.db.models.fields import FieldDoesNotExist
+from django.db.models.lookups import Transform
from django.db.models.query_utils import Q
from django.db.models.related import PathInfo
from django.db.models.sql import aggregates as base_aggregates_module
@@ -1028,13 +1029,16 @@ class Query(object):
# Add the aggregate to the query
aggregate.add_to_query(self, alias, col=col, source=source, is_summary=is_summary)
- def prepare_lookup_value(self, value, lookup_type, can_reuse):
+ def prepare_lookup_value(self, value, lookups, can_reuse):
+ # Default lookup if none given is exact.
+ if len(lookups) == 0:
+ lookups = ['exact']
# Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all
# uses of None as a query value.
if value is None:
- if lookup_type not in ('exact', 'iexact'):
+ if lookups[-1] not in ('exact', 'iexact'):
raise ValueError("Cannot use None as a query value")
- lookup_type = 'isnull'
+ lookups[-1] = 'isnull'
value = True
elif callable(value):
warnings.warn(
@@ -1055,40 +1059,54 @@ class Query(object):
# stage. Using DEFAULT_DB_ALIAS isn't nice, but it is the best we
# can do here. Similar thing is done in is_nullable(), too.
if (connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and
- lookup_type == 'exact' and value == ''):
+ lookups[-1] == 'exact' and value == ''):
value = True
- lookup_type = 'isnull'
- return value, lookup_type
+ lookups[-1] = ['isnull']
+ return value, lookups
def solve_lookup_type(self, lookup):
"""
Solve the lookup type from the lookup (eg: 'foobar__id__icontains')
"""
- lookup_type = 'exact' # Default lookup type
- lookup_parts = lookup.split(LOOKUP_SEP)
- num_parts = len(lookup_parts)
- if (len(lookup_parts) > 1 and lookup_parts[-1] in self.query_terms
- and (not self._aggregates or lookup not in self._aggregates)):
- # Traverse the lookup query to distinguish related fields from
- # lookup types.
- lookup_model = self.model
- for counter, field_name in enumerate(lookup_parts):
- try:
- lookup_field = lookup_model._meta.get_field(field_name)
- except FieldDoesNotExist:
- # Not a field. Bail out.
- lookup_type = lookup_parts.pop()
- break
- # Unless we're at the end of the list of lookups, let's attempt
- # to continue traversing relations.
- if (counter + 1) < num_parts:
- try:
- lookup_model = lookup_field.rel.to
- except AttributeError:
- # Not a related field. Bail out.
- lookup_type = lookup_parts.pop()
- break
- return lookup_type, lookup_parts
+ lookup_splitted = lookup.split(LOOKUP_SEP)
+ if self._aggregates:
+ aggregate, aggregate_lookups = refs_aggregate(lookup_splitted, self.aggregates)
+ if aggregate:
+ return aggregate_lookups, (), aggregate
+ _, field, _, lookup_parts = self.names_to_path(lookup_splitted, self.get_meta())
+ field_parts = lookup_splitted[0:len(lookup_splitted) - len(lookup_parts)]
+ if len(lookup_parts) == 0:
+ lookup_parts = ['exact']
+ elif len(lookup_parts) > 1:
+ if not field_parts:
+ raise FieldError(
+ 'Invalid lookup "%s" for model %s".' %
+ (lookup, self.get_meta().model.__name__))
+ return lookup_parts, field_parts, False
+
+ def build_lookup(self, lookups, lhs, rhs):
+ lookups = lookups[:]
+ while lookups:
+ lookup = lookups[0]
+ next = lhs.get_lookup(lookup)
+ if next:
+ if len(lookups) == 1:
+ # This was the last lookup, so return value lookup.
+ if issubclass(next, Transform):
+ lookups.append('exact')
+ lhs = next(lhs, lookups)
+ else:
+ return next(lhs, rhs)
+ else:
+ lhs = next(lhs, lookups)
+ # A field's get_lookup() can return None to opt for backwards
+ # compatibility path.
+ elif len(lookups) > 2:
+ raise FieldError(
+ "Unsupported lookup for field '%s'" % lhs.output_type.name)
+ else:
+ return None
+ lookups = lookups[1:]
def build_filter(self, filter_expr, branch_negated=False, current_negated=False,
can_reuse=None, connector=AND):
@@ -1118,21 +1136,24 @@ class Query(object):
is responsible for unreffing the joins used.
"""
arg, value = filter_expr
- lookup_type, parts = self.solve_lookup_type(arg)
- if not parts:
+ if not arg:
raise FieldError("Cannot parse keyword query %r" % arg)
+ lookups, parts, reffed_aggregate = self.solve_lookup_type(arg)
# Work out the lookup type and remove it from the end of 'parts',
# if necessary.
- value, lookup_type = self.prepare_lookup_value(value, lookup_type, can_reuse)
+ value, lookups = self.prepare_lookup_value(value, lookups, can_reuse)
used_joins = getattr(value, '_used_joins', [])
clause = self.where_class()
- if self._aggregates:
- for alias, aggregate in self.aggregates.items():
- if alias in (parts[0], LOOKUP_SEP.join(parts)):
- clause.add((aggregate, lookup_type, value), AND)
- return clause, []
+ if reffed_aggregate:
+ condition = self.build_lookup(lookups, reffed_aggregate, value)
+ if not condition:
+ # Backwards compat for custom lookups
+ assert len(lookups) == 1
+ condition = (reffed_aggregate, lookups[0], value)
+ clause.add(condition, AND)
+ return clause, []
opts = self.get_meta()
alias = self.get_initial_alias()
@@ -1154,11 +1175,31 @@ class Query(object):
targets, alias, join_list = self.trim_joins(sources, join_list, path)
if hasattr(field, 'get_lookup_constraint'):
- constraint = field.get_lookup_constraint(self.where_class, alias, targets, sources,
- lookup_type, value)
+ # For now foreign keys get special treatment. This should be
+ # refactored when composite fields lands.
+ condition = field.get_lookup_constraint(self.where_class, alias, targets, sources,
+ lookups, value)
+ lookup_type = lookups[-1]
else:
- constraint = (Constraint(alias, targets[0].column, field), lookup_type, value)
- clause.add(constraint, AND)
+ assert(len(targets) == 1)
+ col = Col(alias, targets[0], field)
+ condition = self.build_lookup(lookups, col, value)
+ if not condition:
+ # Backwards compat for custom lookups
+ if lookups[0] not in self.query_terms:
+ raise FieldError(
+ "Join on field '%s' not permitted. Did you "
+ "misspell '%s' for the lookup type?" %
+ (col.output_type.name, lookups[0]))
+ if len(lookups) > 1:
+ raise FieldError("Nested lookup '%s' not supported." %
+ LOOKUP_SEP.join(lookups))
+ condition = (Constraint(alias, targets[0].column, field), lookups[0], value)
+ lookup_type = lookups[-1]
+ else:
+ lookup_type = condition.lookup_name
+
+ clause.add(condition, AND)
require_outer = lookup_type == 'isnull' and value is True and not current_negated
if current_negated and (lookup_type != 'isnull' or value is False):
@@ -1175,7 +1216,8 @@ class Query(object):
# (col IS NULL OR col != someval)
# <=>
# NOT (col IS NOT NULL AND col = someval).
- clause.add((Constraint(alias, targets[0].column, None), 'isnull', False), AND)
+ lookup_class = targets[0].get_lookup('isnull')
+ clause.add(lookup_class(Col(alias, targets[0], sources[0]), False), AND)
return clause, used_joins if not require_outer else ()
def add_filter(self, filter_clause):
@@ -1189,7 +1231,7 @@ class Query(object):
if not self._aggregates:
return False
if not isinstance(obj, Node):
- return (refs_aggregate(obj[0].split(LOOKUP_SEP), self.aggregates)
+ return (refs_aggregate(obj[0].split(LOOKUP_SEP), self.aggregates)[0]
or (hasattr(obj[1], 'contains_aggregate')
and obj[1].contains_aggregate(self.aggregates)))
return any(self.need_having(c) for c in obj.children)
@@ -1277,7 +1319,7 @@ class Query(object):
needed_inner = joinpromoter.update_join_types(self)
return target_clause, needed_inner
- def names_to_path(self, names, opts, allow_many):
+ def names_to_path(self, names, opts, allow_many=True, fail_on_missing=False):
"""
Walks the names path and turns them PathInfo tuples. Note that a
single name in 'names' can generate multiple PathInfos (m2m for
@@ -1297,9 +1339,10 @@ class Query(object):
try:
field, model, direct, m2m = opts.get_field_by_name(name)
except FieldDoesNotExist:
- available = opts.get_all_field_names() + list(self.aggregate_select)
- raise FieldError("Cannot resolve keyword %r into field. "
- "Choices are: %s" % (name, ", ".join(available)))
+ # We didn't find the current field, so move position back
+ # one step.
+ pos -= 1
+ break
# Check if we need any joins for concrete inheritance cases (the
# field lives in parent, but we are currently in one of its
# children)
@@ -1334,15 +1377,14 @@ class Query(object):
final_field = field
targets = (field,)
break
+ if pos == -1 or (fail_on_missing and pos + 1 != len(names)):
+ self.raise_field_error(opts, name)
+ return path, final_field, targets, names[pos + 1:]
- if pos != len(names) - 1:
- if pos == len(names) - 2:
- raise FieldError(
- "Join on field %r not permitted. Did you misspell %r for "
- "the lookup type?" % (name, names[pos + 1]))
- else:
- raise FieldError("Join on field %r not permitted." % name)
- return path, final_field, targets
+ def raise_field_error(self, opts, name):
+ available = opts.get_all_field_names() + list(self.aggregate_select)
+ raise FieldError("Cannot resolve keyword %r into field. "
+ "Choices are: %s" % (name, ", ".join(available)))
def setup_joins(self, names, opts, alias, can_reuse=None, allow_many=True):
"""
@@ -1371,8 +1413,9 @@ class Query(object):
"""
joins = [alias]
# First, generate the path for the names
- path, final_field, targets = self.names_to_path(
- names, opts, allow_many)
+ path, final_field, targets, rest = self.names_to_path(
+ names, opts, allow_many, fail_on_missing=True)
+
# Then, add the path to the query's joins. Note that we can't trim
# joins at this stage - we will need the information about join type
# of the trimmed joins.
@@ -1387,8 +1430,6 @@ class Query(object):
alias = self.join(
connection, reuse=reuse, nullable=nullable, join_field=join.join_field)
joins.append(alias)
- if hasattr(final_field, 'field'):
- final_field = final_field.field
return final_field, targets, opts, joins, path
def trim_joins(self, targets, joins, path):
@@ -1451,17 +1492,19 @@ class Query(object):
# nothing
alias, col = query.select[0].col
if self.is_nullable(query.select[0].field):
- query.where.add((Constraint(alias, col, query.select[0].field), 'isnull', False), AND)
+ lookup_class = query.select[0].field.get_lookup('isnull')
+ lookup = lookup_class(Col(alias, query.select[0].field, query.select[0].field), False)
+ query.where.add(lookup, AND)
if alias in can_reuse:
- pk = query.select[0].field.model._meta.pk
+ select_field = query.select[0].field
+ pk = select_field.model._meta.pk
# Need to add a restriction so that outer query's filters are in effect for
# the subquery, too.
query.bump_prefix(self)
- query.where.add(
- (Constraint(query.select[0].col[0], pk.column, pk),
- 'exact', Col(alias, pk.column)),
- AND
- )
+ lookup_class = select_field.get_lookup('exact')
+ lookup = lookup_class(Col(query.select[0].col[0], pk, pk),
+ Col(alias, pk, pk))
+ query.where.add(lookup, AND)
condition, needed_inner = self.build_filter(
('%s__in' % trimmed_prefix, query),
diff --git a/django/db/models/sql/subqueries.py b/django/db/models/sql/subqueries.py
index e9e292e787..86b1efd3f8 100644
--- a/django/db/models/sql/subqueries.py
+++ b/django/db/models/sql/subqueries.py
@@ -5,12 +5,12 @@ Query subclasses which provide extra functionality beyond simple data retrieval.
from django.conf import settings
from django.core.exceptions import FieldError
from django.db import connections
+from django.db.models.query_utils import Q
from django.db.models.constants import LOOKUP_SEP
from django.db.models.fields import DateField, DateTimeField, FieldDoesNotExist
from django.db.models.sql.constants import GET_ITERATOR_CHUNK_SIZE, SelectInfo
from django.db.models.sql.datastructures import Date, DateTime
from django.db.models.sql.query import Query
-from django.db.models.sql.where import AND, Constraint
from django.utils import six
from django.utils import timezone
@@ -42,10 +42,10 @@ class DeleteQuery(Query):
if not field:
field = self.get_meta().pk
for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
- where = self.where_class()
- where.add((Constraint(None, field.column, field), 'in',
- pk_list[offset:offset + GET_ITERATOR_CHUNK_SIZE]), AND)
- self.do_query(self.get_meta().db_table, where, using=using)
+ self.where = self.where_class()
+ self.add_q(Q(
+ **{field.attname + '__in': pk_list[offset:offset + GET_ITERATOR_CHUNK_SIZE]}))
+ self.do_query(self.get_meta().db_table, self.where, using=using)
def delete_qs(self, query, using):
"""
@@ -80,9 +80,8 @@ class DeleteQuery(Query):
SelectInfo((self.get_initial_alias(), pk.column), None)
]
values = innerq
- where = self.where_class()
- where.add((Constraint(None, pk.column, pk), 'in', values), AND)
- self.where = where
+ self.where = self.where_class()
+ self.add_q(Q(pk__in=values))
self.get_compiler(using).execute_sql(None)
@@ -113,13 +112,10 @@ class UpdateQuery(Query):
related_updates=self.related_updates.copy(), **kwargs)
def update_batch(self, pk_list, values, using):
- pk_field = self.get_meta().pk
self.add_update_values(values)
for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
self.where = self.where_class()
- self.where.add((Constraint(None, pk_field.column, pk_field), 'in',
- pk_list[offset:offset + GET_ITERATOR_CHUNK_SIZE]),
- AND)
+ self.add_q(Q(pk__in=pk_list[offset: offset + GET_ITERATOR_CHUNK_SIZE]))
self.get_compiler(using).execute_sql(None)
def add_update_values(self, values):
diff --git a/django/db/models/sql/where.py b/django/db/models/sql/where.py
index 44a4ce9d1d..be0c559c1b 100644
--- a/django/db/models/sql/where.py
+++ b/django/db/models/sql/where.py
@@ -5,6 +5,7 @@ Code to manage the creation and SQL rendering of 'where' constraints.
import collections
import datetime
from itertools import repeat
+import warnings
from django.conf import settings
from django.db.models.fields import DateTimeField, Field
@@ -101,7 +102,7 @@ class WhereNode(tree.Node):
for child in self.children:
try:
if hasattr(child, 'as_sql'):
- sql, params = child.as_sql(qn=qn, connection=connection)
+ sql, params = qn.compile(child)
else:
# A leaf node in the tree.
sql, params = self.make_atom(child, qn, connection)
@@ -152,16 +153,16 @@ class WhereNode(tree.Node):
sql_string = '(%s)' % sql_string
return sql_string, result_params
- def get_cols(self):
+ def get_group_by_cols(self):
cols = []
for child in self.children:
- if hasattr(child, 'get_cols'):
- cols.extend(child.get_cols())
+ if hasattr(child, 'get_group_by_cols'):
+ cols.extend(child.get_group_by_cols())
else:
if isinstance(child[0], Constraint):
cols.append((child[0].alias, child[0].col))
- if hasattr(child[3], 'get_cols'):
- cols.extend(child[3].get_cols())
+ if hasattr(child[3], 'get_group_by_cols'):
+ cols.extend(child[3].get_group_by_cols())
return cols
def make_atom(self, child, qn, connection):
@@ -174,6 +175,9 @@ class WhereNode(tree.Node):
Returns the string for the SQL fragment and the parameters to use for
it.
"""
+ warnings.warn(
+ "The make_atom() method will be removed in Django 1.9. Use Lookup class instead.",
+ PendingDeprecationWarning)
lvalue, lookup_type, value_annotation, params_or_value = child
field_internal_type = lvalue.field.get_internal_type() if lvalue.field else None
@@ -193,13 +197,13 @@ class WhereNode(tree.Node):
field_sql, field_params = self.sql_for_columns(lvalue, qn, connection, field_internal_type), []
else:
# A smart object with an as_sql() method.
- field_sql, field_params = lvalue.as_sql(qn, connection)
+ field_sql, field_params = qn.compile(lvalue)
is_datetime_field = value_annotation is datetime.datetime
cast_sql = connection.ops.datetime_cast_sql() if is_datetime_field else '%s'
if hasattr(params, 'as_sql'):
- extra, params = params.as_sql(qn, connection)
+ extra, params = qn.compile(params)
cast_sql = ''
else:
extra = ''
@@ -282,6 +286,8 @@ class WhereNode(tree.Node):
if hasattr(child, 'relabel_aliases'):
# For example another WhereNode
child.relabel_aliases(change_map)
+ elif hasattr(child, 'relabeled_clone'):
+ self.children[pos] = child.relabeled_clone(change_map)
elif isinstance(child, (list, tuple)):
# tuple starting with Constraint
child = (child[0].relabeled_clone(change_map),) + child[1:]
@@ -347,10 +353,13 @@ class Constraint(object):
pre-process itself prior to including in the WhereNode.
"""
def __init__(self, alias, col, field):
+ warnings.warn(
+ "The Constraint class will be removed in Django 1.9. Use Lookup class instead.",
+ PendingDeprecationWarning)
self.alias, self.col, self.field = alias, col, field
def prepare(self, lookup_type, value):
- if self.field:
+ if self.field and not hasattr(value, 'as_sql'):
return self.field.get_prep_lookup(lookup_type, value)
return value