diff options
| author | Anton Samarchyan <anton.samarchyan@savoirfairelinux.com> | 2017-01-24 18:04:12 -0500 |
|---|---|---|
| committer | Tim Graham <timograham@gmail.com> | 2017-02-28 09:17:27 -0500 |
| commit | 60e52a047e55bc4cd5a93a8bd4d07baed27e9a22 (patch) | |
| tree | 010a363968b1ed41adf2e64c98d572d7148a2a5e /django/db/models | |
| parent | d6e26e5b7c8063c2cc5aa045edea6555bf358fc2 (diff) | |
Refs #27656 -- Updated django.db docstring verbs according to PEP 257.
Diffstat (limited to 'django/db/models')
| -rw-r--r-- | django/db/models/base.py | 84 | ||||
| -rw-r--r-- | django/db/models/deletion.py | 16 | ||||
| -rw-r--r-- | django/db/models/expressions.py | 50 | ||||
| -rw-r--r-- | django/db/models/fields/__init__.py | 144 | ||||
| -rw-r--r-- | django/db/models/fields/files.py | 10 | ||||
| -rw-r--r-- | django/db/models/fields/related.py | 33 | ||||
| -rw-r--r-- | django/db/models/fields/reverse_related.py | 8 | ||||
| -rw-r--r-- | django/db/models/functions/base.py | 27 | ||||
| -rw-r--r-- | django/db/models/manager.py | 18 | ||||
| -rw-r--r-- | django/db/models/options.py | 26 | ||||
| -rw-r--r-- | django/db/models/query.py | 213 | ||||
| -rw-r--r-- | django/db/models/query_utils.py | 19 | ||||
| -rw-r--r-- | django/db/models/sql/compiler.py | 83 | ||||
| -rw-r--r-- | django/db/models/sql/datastructures.py | 2 | ||||
| -rw-r--r-- | django/db/models/sql/query.py | 196 | ||||
| -rw-r--r-- | django/db/models/sql/subqueries.py | 30 | ||||
| -rw-r--r-- | django/db/models/sql/where.py | 18 | ||||
| -rw-r--r-- | django/db/models/utils.py | 4 |
18 files changed, 410 insertions, 571 deletions
diff --git a/django/db/models/base.py b/django/db/models/base.py index cf3c3f0584..efc8b1862f 100644 --- a/django/db/models/base.py +++ b/django/db/models/base.py @@ -69,9 +69,7 @@ def subclass_exception(name, parents, module, attached_to=None): class ModelBase(type): - """ - Metaclass for all models. - """ + """Metaclass for all models.""" def __new__(cls, name, bases, attrs): super_new = super().__new__ @@ -322,9 +320,7 @@ class ModelBase(type): setattr(cls, name, value) def _prepare(cls): - """ - Creates some methods once self._meta has been populated. - """ + """Create some methods once self._meta has been populated.""" opts = cls._meta opts._prepare(cls) @@ -372,9 +368,7 @@ class ModelBase(type): class ModelState: - """ - A class for storing instance state - """ + """Store model instance state.""" def __init__(self, db=None): self.db = db # If true, uniqueness validation checks will consider this a new, as-yet-unsaved object. @@ -561,7 +555,7 @@ class Model(metaclass=ModelBase): def get_deferred_fields(self): """ - Returns a set containing names of deferred fields for this instance. + Return a set containing names of deferred fields for this instance. """ return { f.attname for f in self._meta.concrete_fields @@ -570,7 +564,7 @@ class Model(metaclass=ModelBase): def refresh_from_db(self, using=None, fields=None): """ - Reloads field values from the database. + Reload field values from the database. By default, the reloading happens from the database this instance was loaded from, or by the read router if this instance wasn't loaded from @@ -622,10 +616,10 @@ class Model(metaclass=ModelBase): def serializable_value(self, field_name): """ - Returns the value of the field name for this instance. If the field is - a foreign key, returns the id value, instead of the object. If there's - no Field object with this name on the model, the model attribute's - value is returned directly. + Return the value of the field name for this instance. 
If the field is + a foreign key, return the id value instead of the object. If there's + no Field object with this name on the model, return the model + attribute's value. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly @@ -640,7 +634,7 @@ class Model(metaclass=ModelBase): def save(self, force_insert=False, force_update=False, using=None, update_fields=None): """ - Saves the current instance. Override this in a subclass if you want to + Save the current instance. Override this in a subclass if you want to control the saving process. The 'force_insert' and 'force_update' parameters can be used to insist @@ -721,7 +715,7 @@ class Model(metaclass=ModelBase): def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ - Handles the parts of saving which should be done only once per save, + Handle the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. @@ -761,9 +755,7 @@ class Model(metaclass=ModelBase): save_base.alters_data = True def _save_parents(self, cls, using, update_fields): - """ - Saves all the parents of cls using values from self. - """ + """Save all the parents of cls using values from self.""" meta = cls._meta for parent, field in meta.parents.items(): # Make sure the link fields are synced between parent and self. @@ -787,7 +779,7 @@ class Model(metaclass=ModelBase): def _save_table(self, raw=False, cls=None, force_insert=False, force_update=False, using=None, update_fields=None): """ - Does the heavy-lifting involved in saving. Updates or inserts the data + Do the heavy-lifting involved in saving. Update or insert the data for a single table. 
""" meta = cls._meta @@ -838,9 +830,8 @@ class Model(metaclass=ModelBase): def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update): """ - This method will try to update the model. If the model was updated (in - the sense that an update query was done and a matching row was found - from the DB) the method will return True. + Try to update the model. Return True if the model was updated (if an + update query was done and a matching row was found in the DB). """ filtered = base_qs.filter(pk=pk_val) if not values: @@ -936,8 +927,8 @@ class Model(metaclass=ModelBase): def validate_unique(self, exclude=None): """ - Checks unique constraints on the model and raises ``ValidationError`` - if any failed. + Check unique constraints on the model and raise ValidationError if any + failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) @@ -952,12 +943,11 @@ class Model(metaclass=ModelBase): def _get_unique_checks(self, exclude=None): """ - Gather a list of checks to perform. Since validate_unique could be + Return a list of checks to perform. Since validate_unique() could be called from a ModelForm, some fields may have been excluded; we can't perform a unique check on a model that is missing fields involved - in that check. - Fields that did not validate should also be excluded, but they need - to be passed in via the exclude argument. + in that check. Fields that did not validate should also be excluded, + but they need to be passed in via the exclude argument. """ if exclude is None: exclude = [] @@ -1125,8 +1115,8 @@ class Model(metaclass=ModelBase): def full_clean(self, exclude=None, validate_unique=True): """ - Calls clean_fields, clean, and validate_unique, on the model, - and raises a ``ValidationError`` for any errors that occurred. + Call clean_fields(), clean(), and validate_unique() on the model. + Raise a ValidationError for any errors that occur. 
""" errors = {} if exclude is None: @@ -1161,7 +1151,7 @@ class Model(metaclass=ModelBase): def clean_fields(self, exclude=None): """ - Cleans all fields and raises a ValidationError containing a dict + Clean all fields and raise a ValidationError containing a dict of all validation errors if any occur. """ if exclude is None: @@ -1212,8 +1202,7 @@ class Model(metaclass=ModelBase): @classmethod def _check_swappable(cls): - """ Check if the swapped model exists. """ - + """Check if the swapped model exists.""" errors = [] if cls._meta.swapped: try: @@ -1253,8 +1242,7 @@ class Model(metaclass=ModelBase): @classmethod def _check_managers(cls, **kwargs): - """ Perform all manager checks. """ - + """Perform all manager checks.""" errors = [] for manager in cls._meta.managers: errors.extend(manager.check(**kwargs)) @@ -1262,8 +1250,7 @@ class Model(metaclass=ModelBase): @classmethod def _check_fields(cls, **kwargs): - """ Perform all field checks. """ - + """Perform all field checks.""" errors = [] for field in cls._meta.local_fields: errors.extend(field.check(**kwargs)) @@ -1304,7 +1291,7 @@ class Model(metaclass=ModelBase): @classmethod def _check_id_field(cls): - """ Check if `id` field is a primary key. """ + """Check if `id` field is a primary key.""" fields = list(f for f in cls._meta.local_fields if f.name == 'id' and f != cls._meta.pk) # fields is empty or consists of the invalid "id" field if fields and not fields[0].primary_key and cls._meta.pk.name == 'id': @@ -1321,8 +1308,7 @@ class Model(metaclass=ModelBase): @classmethod def _check_field_name_clashes(cls): - """ Ref #17673. """ - + """Forbid field shadowing in multi-table inheritance.""" errors = [] used_fields = {} # name or attname -> field @@ -1428,7 +1414,7 @@ class Model(metaclass=ModelBase): @classmethod def _check_index_together(cls): - """ Check the value of "index_together" option. 
""" + """Check the value of "index_together" option.""" if not isinstance(cls._meta.index_together, (tuple, list)): return [ checks.Error( @@ -1455,7 +1441,7 @@ class Model(metaclass=ModelBase): @classmethod def _check_unique_together(cls): - """ Check the value of "unique_together" option. """ + """Check the value of "unique_together" option.""" if not isinstance(cls._meta.unique_together, (tuple, list)): return [ checks.Error( @@ -1530,8 +1516,10 @@ class Model(metaclass=ModelBase): @classmethod def _check_ordering(cls): - """ Check "ordering" option -- is it a list of strings and do all fields - exist? """ + """ + Check "ordering" option -- is it a list of strings and do all fields + exist? + """ if cls._meta._ordering_clash: return [ checks.Error( @@ -1710,9 +1698,7 @@ def make_foreign_order_accessors(model, related_model): def model_unpickle(model_id): - """ - Used to unpickle Model subclasses with deferred fields. - """ + """Used to unpickle Model subclasses with deferred fields.""" if isinstance(model_id, tuple): model = apps.get_model(*model_id) else: diff --git a/django/db/models/deletion.py b/django/db/models/deletion.py index 9da5caaedb..537011487a 100644 --- a/django/db/models/deletion.py +++ b/django/db/models/deletion.py @@ -79,11 +79,11 @@ class Collector: def add(self, objs, source=None, nullable=False, reverse_dependency=False): """ - Adds 'objs' to the collection of objects to be deleted. If the call is + Add 'objs' to the collection of objects to be deleted. If the call is the result of a cascade, 'source' should be the model that caused it, and 'nullable' should be set to True if the relation can be null. - Returns a list of all objects that were not already collected. + Return a list of all objects that were not already collected. """ if not objs: return [] @@ -106,7 +106,7 @@ class Collector: def add_field_update(self, field, value, objs): """ - Schedules a field update. 'objs' must be a homogeneous iterable + Schedule a field update. 
'objs' must be a homogeneous iterable collection of model instances (e.g. a QuerySet). """ if not objs: @@ -118,12 +118,12 @@ class Collector: def can_fast_delete(self, objs, from_field=None): """ - Determines if the objects in the given queryset-like can be + Determine if the objects in the given queryset-like can be fast-deleted. This can be done if there are no cascades, no parents and no signal listeners for the object class. The 'from_field' tells where we are coming from - we need this to - determine if the objects are in fact to be deleted. Allows also + determine if the objects are in fact to be deleted. Allow also skipping parent -> child -> parent chain preventing fast delete of the child. """ @@ -154,7 +154,7 @@ class Collector: def get_del_batches(self, objs, field): """ - Returns the objs in suitably sized batches for the used connection. + Return the objs in suitably sized batches for the used connection. """ conn_batch_size = max( connections[self.using].ops.bulk_batch_size([field.name], objs), 1) @@ -167,7 +167,7 @@ class Collector: def collect(self, objs, source=None, nullable=False, collect_related=True, source_attr=None, reverse_dependency=False, keep_parents=False): """ - Adds 'objs' to the collection of objects to be deleted as well as all + Add 'objs' to the collection of objects to be deleted as well as all parent instances. 'objs' must be a homogeneous iterable collection of model instances (e.g. a QuerySet). If 'collect_related' is True, related objects will be handled by their respective on_delete handler. @@ -228,7 +228,7 @@ class Collector: def related_objects(self, related, objs): """ - Gets a QuerySet of objects related to ``objs`` via the relation ``related``. + Get a QuerySet of objects related to `objs` via the relation `related`. 
""" return related.related_model._base_manager.using(self.using).filter( **{"%s__in" % related.field.name: objs} diff --git a/django/db/models/expressions.py b/django/db/models/expressions.py index 2528da2249..8361e42d20 100644 --- a/django/db/models/expressions.py +++ b/django/db/models/expressions.py @@ -11,7 +11,7 @@ from django.utils.functional import cached_property class Combinable: """ - Provides the ability to combine one or two objects with + Provide the ability to combine one or two objects with some connector. For example F('foo') + F('bar'). """ @@ -120,9 +120,7 @@ class Combinable: @deconstructible class BaseExpression: - """ - Base class for all query expressions. - """ + """Base class for all query expressions.""" # aggregate specific fields is_summary = False @@ -170,7 +168,7 @@ class BaseExpression: * connection: the database connection used for the current query. - Returns: (sql, params) + Return: (sql, params) Where `sql` is a string containing ordered sql parameters to be replaced with the elements of the list `params`. """ @@ -192,7 +190,7 @@ class BaseExpression: def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): """ - Provides the chance to do any preprocessing or validation before being + Provide the chance to do any preprocessing or validation before being added to the query. Arguments: @@ -203,7 +201,7 @@ class BaseExpression: * summarize: a terminal aggregate clause * for_save: whether this expression about to be used in a save or update - Returns: an Expression to be added to the query. + Return: an Expression to be added to the query. """ c = self.copy() c.is_summary = summarize @@ -214,9 +212,7 @@ class BaseExpression: return c def _prepare(self, field): - """ - Hook used by Lookup.get_prep_lookup() to do custom preparation. 
- """ + """Hook used by Lookup.get_prep_lookup() to do custom preparation.""" return self @property @@ -225,9 +221,7 @@ class BaseExpression: @cached_property def output_field(self): - """ - Returns the output type of this expressions. - """ + """Return the output type of this expressions.""" if self._output_field_or_none is None: raise FieldError("Cannot resolve expression type, unknown output_field") return self._output_field_or_none @@ -235,7 +229,7 @@ class BaseExpression: @cached_property def _output_field_or_none(self): """ - Returns the output field of this expression, or None if no output type + Return the output field of this expression, or None if no output type can be resolved. Note that the 'output_field' property will raise FieldError if no type can be resolved, but this attribute allows for None values. @@ -246,10 +240,9 @@ class BaseExpression: def _resolve_output_field(self): """ - Attempts to infer the output type of the expression. If the output - fields of all source fields match then we can simply infer the same - type here. This isn't always correct, but it makes sense most of the - time. + Attempt to infer the output type of the expression. If the output + fields of all source fields match then, simply infer the same type + here. This isn't always correct, but it makes sense most of the time. Consider the difference between `2 + 2` and `2 / 3`. Inferring the type here is a convenience for the common case. The user should @@ -316,10 +309,7 @@ class BaseExpression: return cols def get_source_fields(self): - """ - Returns the underlying field types used by this - aggregate. - """ + """Return the underlying field types used by this aggregate.""" return [e._output_field_or_none for e in self.get_source_expressions()] def asc(self, **kwargs): @@ -364,9 +354,7 @@ class BaseExpression: class Expression(BaseExpression, Combinable): - """ - An expression that can be combined with other expressions. 
- """ + """An expression that can be combined with other expressions.""" pass @@ -470,9 +458,7 @@ class TemporalSubtraction(CombinedExpression): @deconstructible class F(Combinable): - """ - An object capable of resolving references to existing query objects. - """ + """An object capable of resolving references to existing query objects.""" def __init__(self, name): """ Arguments: @@ -527,9 +513,7 @@ class OuterRef(F): class Func(Expression): - """ - An SQL function call. - """ + """An SQL function call.""" function = None template = '%(function)s(%(expressions)s)' arg_joiner = ', ' @@ -608,9 +592,7 @@ class Func(Expression): class Value(Expression): - """ - Represents a wrapped value as a node within an expression - """ + """Represent a wrapped value as a node within an expression.""" def __init__(self, value, output_field=None): """ Arguments: diff --git a/django/db/models/fields/__init__.py b/django/db/models/fields/__init__.py index fdfca33b72..8532959350 100644 --- a/django/db/models/fields/__init__.py +++ b/django/db/models/fields/__init__.py @@ -190,9 +190,7 @@ class Field(RegisterLookupMixin): return '%s.%s.%s' % (app, model._meta.object_name, self.name) def __repr__(self): - """ - Displays the module, class and name of the field. - """ + """Display the module, class, and name of the field.""" path = '%s.%s' % (self.__class__.__module__, self.__class__.__name__) name = getattr(self, 'name', None) if name is not None: @@ -210,9 +208,10 @@ class Field(RegisterLookupMixin): return errors def _check_field_name(self): - """ Check if field name is valid, i.e. 1) does not end with an - underscore, 2) does not contain "__" and 3) is not "pk". """ - + """ + Check if field name is valid, i.e. 1) does not end with an + underscore, 2) does not contain "__" and 3) is not "pk". 
+ """ if self.name.endswith('_'): return [ checks.Error( @@ -348,37 +347,42 @@ class Field(RegisterLookupMixin): def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, GIS columns need to be - selected as AsText(table.col) on MySQL as the table.col data can't be used - by Django. + selected as AsText(table.col) on MySQL as the table.col data can't be + used by Django. """ return sql, params def deconstruct(self): """ - Returns enough information to recreate the field as a 4-tuple: + Return enough information to recreate the field as a 4-tuple: - * The name of the field on the model, if contribute_to_class has been run - * The import path of the field, including the class: django.db.models.IntegerField - This should be the most portable version, so less specific may be better. - * A list of positional arguments - * A dict of keyword arguments + * The name of the field on the model, if contribute_to_class() has + been run. + * The import path of the field, including the class:e.g. + django.db.models.IntegerField This should be the most portable + version, so less specific may be better. + * A list of positional arguments. + * A dict of keyword arguments. 
- Note that the positional or keyword arguments must contain values of the - following types (including inner values of collection types): + Note that the positional or keyword arguments must contain values of + the following types (including inner values of collection types): - * None, bool, str, int, float, complex, set, frozenset, list, tuple, dict + * None, bool, str, int, float, complex, set, frozenset, list, tuple, + dict * UUID * datetime.datetime (naive), datetime.date - * top-level classes, top-level functions - will be referenced by their full import path + * top-level classes, top-level functions - will be referenced by their + full import path * Storage instances - these have their own deconstruct() method This is because the values here must be serialized into a text format (possibly new Python code, possibly JSON) and these are the only types with encoding handlers defined. - There's no need to return the exact way the field was instantiated this time, - just ensure that the resulting field is the same - prefer keyword arguments - over positional ones, and omit parameters with their default values. + There's no need to return the exact way the field was instantiated this + time, just ensure that the resulting field is the same - prefer keyword + arguments over positional ones, and omit parameters with their default + values. """ # Short-form way of fetching all the default parameters keywords = {} @@ -486,7 +490,7 @@ class Field(RegisterLookupMixin): def __reduce__(self): """ Pickling should return the model._meta.fields instance of the field, - not a new copy of that field. So, we use the app registry to load the + not a new copy of that field. So, use the app registry to load the model and then the field back. 
""" if not hasattr(self, 'model'): @@ -512,9 +516,9 @@ class Field(RegisterLookupMixin): def to_python(self, value): """ - Converts the input value into the expected Python data type, raising + Convert the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. - Returns the converted value. Subclasses should override this. + Return the converted value. Subclasses should override this. """ return value @@ -544,8 +548,8 @@ class Field(RegisterLookupMixin): def validate(self, value, model_instance): """ - Validates value and throws ValidationError. Subclasses should override - this to provide validation logic. + Validate value and raise ValidationError if necessary. Subclasses + should override this to provide validation logic. """ if not self.editable: # Skip validation for non-editable fields. @@ -576,8 +580,8 @@ class Field(RegisterLookupMixin): def clean(self, value, model_instance): """ Convert the value's type and run validation. Validation errors - from to_python and validate are propagated. The correct value is - returned if no error is raised. + from to_python() and validate() are propagated. Return the correct + value if no error is raised. """ value = self.to_python(value) self.validate(value, model_instance) @@ -632,9 +636,9 @@ class Field(RegisterLookupMixin): def db_parameters(self, connection): """ - Extension of db_type(), providing a range of different return - values (type, checks). - This will look at db_type(), allowing custom model fields to override it. + Extension of db_type(), providing a range of different return values + (type, checks). This will look at db_type(), allowing custom model + fields to override it. """ type_string = self.db_type(connection) check_string = self.db_check(connection) @@ -667,9 +671,8 @@ class Field(RegisterLookupMixin): """ Register the field with the model class it belongs to. 
- If private_only is True, a separate instance of this field will be - created for every subclass of cls, even if cls is not an abstract - model. + If private_only is True, create a separate instance of this field + for every subclass of cls, even if cls is not an abstract model. """ self.set_attributes_from_name(name) self.model = cls @@ -709,22 +712,18 @@ class Field(RegisterLookupMixin): return self.__class__.__name__ def pre_save(self, model_instance, add): - """ - Returns field's value just before saving. - """ + """Return field's value just before saving.""" return getattr(model_instance, self.attname) def get_prep_value(self, value): - """ - Perform preliminary non-db specific value checks and conversions. - """ + """Perform preliminary non-db specific value checks and conversions.""" if isinstance(value, Promise): value = value._proxy____cast() return value def get_db_prep_value(self, value, connection, prepared=False): - """Returns field's value prepared for interacting with the database - backend. + """ + Return field's value prepared for interacting with the database backend. Used by the default implementations of get_db_prep_save(). """ @@ -733,22 +732,15 @@ class Field(RegisterLookupMixin): return value def get_db_prep_save(self, value, connection): - """ - Returns field's value prepared for saving into a database. - """ - return self.get_db_prep_value(value, connection=connection, - prepared=False) + """Return field's value prepared for saving into a database.""" + return self.get_db_prep_value(value, connection=connection, prepared=False) def has_default(self): - """ - Returns a boolean of whether this field has a default value. - """ + """Return a boolean of whether this field has a default value.""" return self.default is not NOT_PROVIDED def get_default(self): - """ - Returns the default value for this field. 
- """ + """Return the default value for this field.""" return self._get_default() @cached_property @@ -760,11 +752,13 @@ class Field(RegisterLookupMixin): if not self.empty_strings_allowed or self.null and not connection.features.interprets_empty_strings_as_nulls: return return_None - return str # returns empty string + return str # return empty string def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None): - """Returns choices with a default blank choices included, for use - as SelectField choices for this field.""" + """ + Return choices with a default blank choices included, for use + as <select> choices for this field. + """ blank_defined = False choices = list(self.choices) if self.choices else [] named_groups = choices and isinstance(choices[0][1], (list, tuple)) @@ -793,7 +787,7 @@ class Field(RegisterLookupMixin): def value_to_string(self, obj): """ - Returns a string value of this field from the passed obj. + Return a string value of this field from the passed obj. This is used by the serialization framework. """ return force_text(self.value_from_object(obj)) @@ -813,9 +807,7 @@ class Field(RegisterLookupMixin): setattr(instance, self.name, data) def formfield(self, form_class=None, choices_form_class=None, **kwargs): - """ - Returns a django.forms.Field instance for this database Field. - """ + """Return a django.forms.Field instance for this field.""" defaults = {'required': not self.blank, 'label': capfirst(self.verbose_name), 'help_text': self.help_text} @@ -851,9 +843,7 @@ class Field(RegisterLookupMixin): return form_class(**defaults) def value_from_object(self, obj): - """ - Returns the value of this field in the given model instance. 
- """ + """Return the value of this field in the given model instance.""" return getattr(obj, self.attname) @@ -1137,11 +1127,8 @@ class DateField(DateTimeCheckMixin, Field): def _check_fix_default_value(self): """ - Adds a warning to the checks framework stating, that using an actual - date or datetime value is probably wrong; it's only being evaluated on - server start-up. - - For details see ticket #21905 + Warn that using an actual date or datetime value is probably wrong; + it's only evaluated on server startup. """ if not self.has_default(): return [] @@ -1279,11 +1266,8 @@ class DateTimeField(DateField): def _check_fix_default_value(self): """ - Adds a warning to the checks framework stating, that using an actual - date or datetime value is probably wrong; it's only being evaluated on - server start-up. - - For details see ticket #21905 + Warn that using an actual date or datetime value is probably wrong; + it's only evaluated on server startup. """ if not self.has_default(): return [] @@ -1539,7 +1523,7 @@ class DecimalField(Field): def format_number(self, value): """ - Formats a number into a string with the requisite number of digits and + Format a number into a string with the requisite number of digits and decimal places. """ # Method moved to django.db.backends.utils. @@ -1569,9 +1553,10 @@ class DecimalField(Field): class DurationField(Field): - """Stores timedelta objects. + """ + Store timedelta objects. - Uses interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint + Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint of microseconds on other databases. """ empty_strings_allowed = False @@ -2123,11 +2108,8 @@ class TimeField(DateTimeCheckMixin, Field): def _check_fix_default_value(self): """ - Adds a warning to the checks framework stating, that using an actual - time or datetime value is probably wrong; it's only being evaluated on - server start-up. 
- - For details see ticket #21905 + Warn that using an actual date or datetime value is probably wrong; + it's only evaluated on server startup. """ if not self.has_default(): return [] diff --git a/django/db/models/fields/files.py b/django/db/models/fields/files.py index f2a1ff01b1..bf0b1682c5 100644 --- a/django/db/models/fields/files.py +++ b/django/db/models/fields/files.py @@ -133,14 +133,14 @@ class FieldFile(File): class FileDescriptor: """ - The descriptor for the file attribute on the model instance. Returns a - FieldFile when accessed so you can do stuff like:: + The descriptor for the file attribute on the model instance. Return a + FieldFile when accessed so you can write code like:: >>> from myapp.models import MyModel >>> instance = MyModel.objects.get(pk=1) >>> instance.file.size - Assigns a file object on assignment so you can do:: + Assign a file object on assignment so you can do:: >>> with open('/path/to/hello.world', 'r') as f: ... instance.file = File(f) @@ -275,7 +275,6 @@ class FileField(Field): return "FileField" def get_prep_value(self, value): - "Returns field's value prepared for saving into a database." value = super().get_prep_value(value) # Need to convert File objects provided via a form to string for database insertion if value is None: @@ -283,7 +282,6 @@ class FileField(Field): return str(value) def pre_save(self, model_instance, add): - "Returns field's value just before saving." file = super().pre_save(model_instance, add) if file and not file._committed: # Commit the file to storage prior to saving the model @@ -406,7 +404,7 @@ class ImageField(FileField): def update_dimension_fields(self, instance, force=False, *args, **kwargs): """ - Updates field's width and height fields, if defined. + Update field's width and height fields, if defined. This method is hooked up to model's post_init signal to update dimensions after instantiating a model instance. 
However, dimensions diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py index 94c4cc81b9..c2e6f6bffa 100644 --- a/django/db/models/fields/related.py +++ b/django/db/models/fields/related.py @@ -80,9 +80,7 @@ def lazy_related_operation(function, model, *related_models, **kwargs): class RelatedField(Field): - """ - Base class that all relational fields inherit from. - """ + """Base class that all relational fields inherit from.""" # Field flags one_to_many = False @@ -192,9 +190,7 @@ class RelatedField(Field): return [] def _check_clashes(self): - """ - Check accessor and reverse query name clashes. - """ + """Check accessor and reverse query name clashes.""" from django.db.models.base import ModelBase errors = [] @@ -424,7 +420,7 @@ class RelatedField(Field): @property def target_field(self): """ - When filtering against this relation, returns the field on the remote + When filtering against this relation, return the field on the remote model against which the filtering should happen. """ target_fields = self.get_path_info()[-1].target_fields @@ -436,7 +432,7 @@ class RelatedField(Field): class ForeignObject(RelatedField): """ - Abstraction of the ForeignKey relation, supports multi-column relations. + Abstraction of the ForeignKey relation to support multi-column relations. """ # Field flags @@ -693,17 +689,13 @@ class ForeignObject(RelatedField): return None def get_path_info(self): - """ - Get path from this field to the related model. - """ + """Get path from this field to the related model.""" opts = self.remote_field.model._meta from_opts = self.model._meta return [PathInfo(from_opts, opts, self.foreign_related_fields, self, False, True)] def get_reverse_path_info(self): - """ - Get path from the related model to this field's model. 
- """ + """Get path from the related model to this field's model.""" opts = self.model._meta from_opts = self.remote_field.model._meta pathinfos = [PathInfo(from_opts, opts, (opts.pk,), self.remote_field, not self.unique, False)] @@ -861,9 +853,7 @@ class ForeignKey(ForeignObject): return self.foreign_related_fields[0] def get_reverse_path_info(self): - """ - Get path from the related model to this field's model. - """ + """Get path from the related model to this field's model.""" opts = self.model._meta from_opts = self.remote_field.model._meta pathinfos = [PathInfo(from_opts, opts, (opts.pk,), self.remote_field, not self.unique, False)] @@ -900,7 +890,7 @@ class ForeignKey(ForeignObject): return attname, column def get_default(self): - "Here we check if the default value is an object and return the to_field if so." + """Return the to_field if the default value is an object.""" field_default = super().get_default() if isinstance(field_default, self.remote_field.model): return getattr(field_default, self.target_field.attname) @@ -1441,9 +1431,7 @@ class ManyToManyField(RelatedField): return name, path, args, kwargs def _get_path_info(self, direct=False): - """ - Called by both direct and indirect m2m traversal. - """ + """Called by both direct and indirect m2m traversal.""" pathinfos = [] int_model = self.remote_field.through linkfield1 = int_model._meta.get_field(self.m2m_field_name()) @@ -1598,9 +1586,6 @@ class ManyToManyField(RelatedField): pass def value_from_object(self, obj): - """ - Return the value of this field in the given model instance. 
- """ if obj.pk is None: return self.related_model.objects.none() return getattr(obj, self.attname).all() diff --git a/django/db/models/fields/reverse_related.py b/django/db/models/fields/reverse_related.py index 3d5439ab84..3e0285abea 100644 --- a/django/db/models/fields/reverse_related.py +++ b/django/db/models/fields/reverse_related.py @@ -66,7 +66,7 @@ class ForeignObjectRel: @property def target_field(self): """ - When filtering against this relation, returns the field on the remote + When filtering against this relation, return the field on the remote model against which the filtering should happen. """ target_fields = self.get_path_info()[-1].target_fields @@ -116,8 +116,8 @@ class ForeignObjectRel: def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH): """ - Return choices with a default blank choices included, for use as - SelectField choices for this field. + Return choices with a default blank choices included, for use + as <select> choices for this field. Analog of django.db.models.fields.Field.get_choices(), provided initially for utilization by RelatedFieldListFilter. @@ -127,7 +127,7 @@ class ForeignObjectRel: ] def is_hidden(self): - "Should the related object be hidden?" + """Should the related object be hidden?""" return bool(self.related_name) and self.related_name[-1] == '+' def get_joining_columns(self): diff --git a/django/db/models/functions/base.py b/django/db/models/functions/base.py index 905f740a6c..beac2e2d72 100644 --- a/django/db/models/functions/base.py +++ b/django/db/models/functions/base.py @@ -5,9 +5,7 @@ from django.db.models import Func, Transform, Value, fields class Cast(Func): - """ - Coerce an expression to a new field type. - """ + """Coerce an expression to a new field type.""" function = 'CAST' template = '%(function)s(%(expressions)s AS %(db_type)s)' @@ -38,9 +36,7 @@ class Cast(Func): class Coalesce(Func): - """ - Chooses, from left to right, the first non-null expression and returns it. 
- """ + """Return, from left to right, the first non-null expression.""" function = 'COALESCE' def __init__(self, *expressions, **extra): @@ -65,9 +61,8 @@ class Coalesce(Func): class ConcatPair(Func): """ - A helper class that concatenates two arguments together. This is used - by `Concat` because not all backend databases support more than two - arguments. + Concatenate two arguments together. This is used by `Concat` because not + all backend databases support more than two arguments. """ function = 'CONCAT' @@ -98,9 +93,9 @@ class ConcatPair(Func): class Concat(Func): """ - Concatenates text fields together. Backends that result in an entire + Concatenate text fields together. Backends that result in an entire null expression when any arguments are null will wrap each argument in - coalesce functions to ensure we always get a non-null result. + coalesce functions to ensure a non-null result. """ function = None template = "%(expressions)s" @@ -122,7 +117,7 @@ class Concat(Func): class Greatest(Func): """ - Chooses the maximum expression and returns it. + Return the maximum expression. If any expression is null the return value is database-specific: On Postgres, the maximum not-null expression is returned. @@ -142,11 +137,11 @@ class Greatest(Func): class Least(Func): """ - Chooses the minimum expression and returns it. + Return the minimum expression. If any expression is null the return value is database-specific: - On Postgres, the minimum not-null expression is returned. - On MySQL, Oracle, and SQLite, if any expression is null, null is returned. + On Postgres, return the minimum not-null expression. + On MySQL, Oracle, and SQLite, if any expression is null, return null. 
""" function = 'LEAST' @@ -161,7 +156,7 @@ class Least(Func): class Length(Transform): - """Returns the number of characters in the expression""" + """Return the number of characters in the expression.""" function = 'LENGTH' lookup_name = 'length' diff --git a/django/db/models/manager.py b/django/db/models/manager.py index dd97272d38..31f2dd3723 100644 --- a/django/db/models/manager.py +++ b/django/db/models/manager.py @@ -7,18 +7,18 @@ from django.db.models.query import QuerySet class BaseManager: - # Tracks each time a Manager instance is created. Used to retain order. + # To retain order, track each time a Manager instance is created. creation_counter = 0 # Set to True for the 'objects' managers that are automatically created. auto_created = False #: If set to True the manager will be serialized into migrations and will - #: thus be available in e.g. RunPython operations + #: thus be available in e.g. RunPython operations. use_in_migrations = False def __new__(cls, *args, **kwargs): - # We capture the arguments to make returning them trivial + # Capture the arguments to make returning them trivial. obj = super().__new__(cls) obj._constructor_args = (args, kwargs) return obj @@ -32,15 +32,15 @@ class BaseManager: self._hints = {} def __str__(self): - """ Return "app_label.model_label.manager_name". """ + """Return "app_label.model_label.manager_name".""" return '%s.%s' % (self.model._meta.label, self.name) def deconstruct(self): """ - Returns a 5-tuple of the form (as_manager (True), manager_class, + Return a 5-tuple of the form (as_manager (True), manager_class, queryset_class, args, kwargs). - Raises a ValueError if the manager is dynamically generated. + Raise a ValueError if the manager is dynamically generated. 
""" qs_class = self._queryset_class if getattr(self, '_built_with_as_manager', False): @@ -118,7 +118,7 @@ class BaseManager: def _set_creation_counter(self): """ - Sets the creation counter value for this instance and increments the + Set the creation counter value for this instance and increment the class-level copy. """ self.creation_counter = BaseManager.creation_counter @@ -140,8 +140,8 @@ class BaseManager: def get_queryset(self): """ - Returns a new QuerySet object. Subclasses can override this method to - easily customize the behavior of the Manager. + Return a new QuerySet object. Subclasses can override this method to + customize the behavior of the Manager. """ return self._queryset_class(model=self.model, using=self._db, hints=self._hints) diff --git a/django/db/models/options.py b/django/db/models/options.py index b52f057bb7..c8f7ade82c 100644 --- a/django/db/models/options.py +++ b/django/db/models/options.py @@ -284,7 +284,7 @@ class Options: def setup_proxy(self, target): """ - Does the internal setup so that the current model is a proxy for + Do the internal setup so that the current model is a proxy for "target". """ self.pk = target._meta.pk @@ -315,11 +315,7 @@ class Options: @property def verbose_name_raw(self): - """ - There are a few places where the untranslated verbose name is needed - (so that we get the same value regardless of currently active - locale). - """ + """Return the untranslated verbose name.""" with override(None): return force_text(self.verbose_name) @@ -427,7 +423,7 @@ class Options: @cached_property def fields(self): """ - Returns a list of all forward fields on the model and its parents, + Return a list of all forward fields on the model and its parents, excluding ManyToManyFields. Private API intended only to be used by Django itself; get_fields() @@ -461,7 +457,7 @@ class Options: @cached_property def concrete_fields(self): """ - Returns a list of all concrete fields on the model and its parents. 
+ Return a list of all concrete fields on the model and its parents. Private API intended only to be used by Django itself; get_fields() combined with filtering of field properties is the public API for @@ -474,7 +470,7 @@ class Options: @cached_property def local_concrete_fields(self): """ - Returns a list of all concrete fields on the model. + Return a list of all concrete fields on the model. Private API intended only to be used by Django itself; get_fields() combined with filtering of field properties is the public API for @@ -487,7 +483,7 @@ class Options: @cached_property def many_to_many(self): """ - Returns a list of all many to many fields on the model and its parents. + Return a list of all many to many fields on the model and its parents. Private API intended only to be used by Django itself; get_fields() combined with filtering of field properties is the public API for @@ -501,7 +497,7 @@ class Options: @cached_property def related_objects(self): """ - Returns all related objects pointing to the current model. The related + Return all related objects pointing to the current model. The related objects can come from a one-to-one, one-to-many, or many-to-many field relation type. @@ -589,7 +585,7 @@ class Options: def get_parent_list(self): """ - Returns all the ancestors of this model as a list ordered by MRO. + Return all the ancestors of this model as a list ordered by MRO. Useful for determining if something is an ancestor, regardless of lineage. """ result = OrderedSet(self.parents) @@ -600,12 +596,12 @@ class Options: def get_ancestor_link(self, ancestor): """ - Returns the field on the current model which points to the given + Return the field on the current model which points to the given "ancestor". This is possible an indirect link (a pointer to a parent model, which points, eventually, to the ancestor). Used when constructing table joins for model inheritance. - Returns None if the model isn't an ancestor of this one. 
+ Return None if the model isn't an ancestor of this one. """ if ancestor in self.parents: return self.parents[ancestor] @@ -717,7 +713,7 @@ class Options: def get_fields(self, include_parents=True, include_hidden=False): """ - Returns a list of fields associated to the model. By default, includes + Return a list of fields associated to the model. By default, include forward and reverse fields, fields derived from inheritance, but not hidden fields. The returned fields can be changed using the parameters: diff --git a/django/db/models/query.py b/django/db/models/query.py index 290be2e779..4e786a239e 100644 --- a/django/db/models/query.py +++ b/django/db/models/query.py @@ -39,9 +39,7 @@ class BaseIterable: class ModelIterable(BaseIterable): - """ - Iterable that yields a model instance for each row. - """ + """Iterable that yields a model instance for each row.""" def __iter__(self): queryset = self.queryset @@ -86,8 +84,7 @@ class ModelIterable(BaseIterable): class ValuesIterable(BaseIterable): """ - Iterable returned by QuerySet.values() that yields a dict - for each row. + Iterable returned by QuerySet.values() that yields a dict for each row. """ def __iter__(self): @@ -108,8 +105,8 @@ class ValuesIterable(BaseIterable): class ValuesListIterable(BaseIterable): """ - Iterable returned by QuerySet.values_list(flat=False) - that yields a tuple for each row. + Iterable returned by QuerySet.values_list(flat=False) that yields a tuple + for each row. """ def __iter__(self): @@ -141,8 +138,8 @@ class ValuesListIterable(BaseIterable): class FlatValuesListIterable(BaseIterable): """ - Iterable returned by QuerySet.values_list(flat=True) that - yields single values. + Iterable returned by QuerySet.values_list(flat=True) that yields single + values. """ def __iter__(self): @@ -153,9 +150,7 @@ class FlatValuesListIterable(BaseIterable): class QuerySet: - """ - Represents a lazy database lookup for a set of objects. 
- """ + """Represent a lazy database lookup for a set of objects.""" def __init__(self, model=None, query=None, using=None, hints=None): self.model = model @@ -185,9 +180,7 @@ class QuerySet: ######################## def __deepcopy__(self, memo): - """ - Deep copy of a QuerySet doesn't populate the cache - """ + """Don't populate the QuerySet's cache.""" obj = self.__class__() for k, v in self.__dict__.items(): if k == '_result_cache': @@ -254,9 +247,7 @@ class QuerySet: return bool(self._result_cache) def __getitem__(self, k): - """ - Retrieves an item or slice from the set of results. - """ + """Retrieve an item or slice from the set of results.""" if not isinstance(k, (int, slice)): raise TypeError assert ((not isinstance(k, slice) and (k >= 0)) or @@ -319,8 +310,8 @@ class QuerySet: def aggregate(self, *args, **kwargs): """ - Returns a dictionary containing the calculations (aggregation) - over the current queryset + Return a dictionary containing the calculations (aggregation) + over the current queryset. If args is present the expression is passed as a kwarg using the Aggregate object's default alias. @@ -347,11 +338,11 @@ class QuerySet: def count(self): """ - Performs a SELECT COUNT() and returns the number of records as an + Perform a SELECT COUNT() and return the number of records as an integer. - If the QuerySet is already fully cached this simply returns the length - of the cached results set to avoid multiple SELECT COUNT(*) calls. + If the QuerySet is already fully cached, return the length of the + cached results set to avoid multiple SELECT COUNT(*) calls. """ if self._result_cache is not None: return len(self._result_cache) @@ -360,7 +351,7 @@ class QuerySet: def get(self, *args, **kwargs): """ - Performs the query and returns a single object matching the given + Perform the query and return a single object matching the given keyword arguments. 
""" clone = self.filter(*args, **kwargs) @@ -381,7 +372,7 @@ class QuerySet: def create(self, **kwargs): """ - Creates a new object with the given kwargs, saving it to the database + Create a new object with the given kwargs, saving it to the database and returning the created object. """ obj = self.model(**kwargs) @@ -396,9 +387,9 @@ class QuerySet: def bulk_create(self, objs, batch_size=None): """ - Inserts each of the instances into the database. This does *not* call - save() on each of the instances, does not send any pre/post save - signals, and does not set the primary key attribute if it is an + Insert each of the instances into the database. Do *not* call + save() on each of the instances, do not send any pre/post_save + signals, and do not set the primary key attribute if it is an autoincrement field (except if features.can_return_ids_from_bulk_insert=True). Multi-table models are not supported. """ @@ -447,8 +438,8 @@ class QuerySet: def get_or_create(self, defaults=None, **kwargs): """ - Looks up an object with the given kwargs, creating one if necessary. - Returns a tuple of (object, created), where created is a boolean + Look up an object with the given kwargs, creating one if necessary. + Return a tuple of (object, created), where created is a boolean specifying whether an object was created. """ lookup, params = self._extract_model_params(defaults, **kwargs) @@ -462,9 +453,9 @@ class QuerySet: def update_or_create(self, defaults=None, **kwargs): """ - Looks up an object with the given kwargs, updating one with defaults - if it exists, otherwise creates a new one. - Returns a tuple (object, created), where created is a boolean + Look up an object with the given kwargs, updating one with defaults + if it exists, otherwise create a new one. + Return a tuple (object, created), where created is a boolean specifying whether an object was created. 
""" defaults = defaults or {} @@ -484,8 +475,8 @@ class QuerySet: def _create_object_from_params(self, lookup, params): """ - Tries to create an object using passed params. - Used by get_or_create and update_or_create + Try to create an object using passed params. Used by get_or_create() + and update_or_create(). """ try: with transaction.atomic(using=self.db): @@ -502,9 +493,9 @@ class QuerySet: def _extract_model_params(self, defaults, **kwargs): """ - Prepares `lookup` (kwargs that are valid model attributes), `params` + Prepare `lookup` (kwargs that are valid model attributes), `params` (for creating a model instance) based on given kwargs; for use by - get_or_create and update_or_create. + get_or_create() and update_or_create(). """ defaults = defaults or {} lookup = kwargs.copy() @@ -530,7 +521,7 @@ class QuerySet: def _earliest_or_latest(self, field_name=None, direction="-"): """ - Returns the latest object, according to the model's + Return the latest object, according to the model's 'get_latest_by' option or optional given field_name. """ order_by = field_name or getattr(self.model._meta, 'get_latest_by') @@ -551,18 +542,14 @@ class QuerySet: return self._earliest_or_latest(field_name=field_name, direction="-") def first(self): - """ - Returns the first object of a query, returns None if no match is found. - """ + """Return the first object of a query or None if no match is found.""" objects = list((self if self.ordered else self.order_by('pk'))[:1]) if objects: return objects[0] return None def last(self): - """ - Returns the last object of a query, returns None if no match is found. - """ + """Return the last object of a query or None if no match is found.""" objects = list((self.reverse() if self.ordered else self.order_by('-pk'))[:1]) if objects: return objects[0] @@ -570,8 +557,8 @@ class QuerySet: def in_bulk(self, id_list=None): """ - Returns a dictionary mapping each of the given IDs to the object with - that ID. 
If `id_list` isn't provided, the entire QuerySet is evaluated. + Return a dictionary mapping each of the given IDs to the object with + that ID. If `id_list` isn't provided, evaluate the entire QuerySet. """ assert self.query.can_filter(), \ "Cannot use 'limit' or 'offset' with in_bulk" @@ -584,9 +571,7 @@ class QuerySet: return {obj._get_pk_val(): obj for obj in qs} def delete(self): - """ - Deletes the records in the current QuerySet. - """ + """Delete the records in the current QuerySet.""" assert self.query.can_filter(), \ "Cannot use 'limit' or 'offset' with delete." @@ -618,15 +603,15 @@ class QuerySet: def _raw_delete(self, using): """ - Deletes objects found from the given queryset in single direct SQL - query. No signals are sent, and there is no protection for cascades. + Delete objects found from the given queryset in single direct SQL + query. No signals are sent and there is no protection for cascades. """ return sql.DeleteQuery(self.model).delete_qs(self, using) _raw_delete.alters_data = True def update(self, **kwargs): """ - Updates all elements in the current QuerySet, setting all the given + Update all elements in the current QuerySet, setting all the given fields to the appropriate values. """ assert self.query.can_filter(), \ @@ -644,7 +629,7 @@ class QuerySet: def _update(self, values): """ - A version of update that accepts field objects instead of field names. + A version of update() that accepts field objects instead of field names. Used primarily for model saving and not intended for use by general code (it requires too much poking around at model internals to be useful at that level). @@ -711,7 +696,7 @@ class QuerySet: def dates(self, field_name, kind, order='ASC'): """ - Returns a list of date objects representing all available dates for + Return a list of date objects representing all available dates for the given field_name, scoped to 'kind'. 
""" assert kind in ("year", "month", "day"), \ @@ -727,7 +712,7 @@ class QuerySet: def datetimes(self, field_name, kind, order='ASC', tzinfo=None): """ - Returns a list of datetime objects representing all available + Return a list of datetime objects representing all available datetimes for the given field_name, scoped to 'kind'. """ assert kind in ("year", "month", "day", "hour", "minute", "second"), \ @@ -747,9 +732,7 @@ class QuerySet: ).distinct().filter(plain_field__isnull=False).order_by(('-' if order == 'DESC' else '') + 'datetimefield') def none(self): - """ - Returns an empty QuerySet. - """ + """Return an empty QuerySet.""" clone = self._clone() clone.query.set_empty() return clone @@ -760,21 +743,21 @@ class QuerySet: def all(self): """ - Returns a new QuerySet that is a copy of the current one. This allows a + Return a new QuerySet that is a copy of the current one. This allows a QuerySet to proxy for a model manager in some cases. """ return self._clone() def filter(self, *args, **kwargs): """ - Returns a new QuerySet instance with the args ANDed to the existing + Return a new QuerySet instance with the args ANDed to the existing set. """ return self._filter_or_exclude(False, *args, **kwargs) def exclude(self, *args, **kwargs): """ - Returns a new QuerySet instance with NOT (args) ANDed to the existing + Return a new QuerySet instance with NOT (args) ANDed to the existing set. """ return self._filter_or_exclude(True, *args, **kwargs) @@ -793,7 +776,7 @@ class QuerySet: def complex_filter(self, filter_obj): """ - Returns a new QuerySet instance with filter_obj added to the filters. + Return a new QuerySet instance with filter_obj added to the filters. filter_obj can be a Q object (or anything with an add_to_query() method) or a dictionary of keyword lookup arguments. 
@@ -830,7 +813,7 @@ class QuerySet: def select_for_update(self, nowait=False, skip_locked=False): """ - Returns a new QuerySet instance that will select objects with a + Return a new QuerySet instance that will select objects with a FOR UPDATE lock. """ if nowait and skip_locked: @@ -844,12 +827,12 @@ class QuerySet: def select_related(self, *fields): """ - Returns a new QuerySet instance that will select related objects. + Return a new QuerySet instance that will select related objects. If fields are specified, they must be ForeignKey fields and only those related objects are included in the selection. - If select_related(None) is called, the list is cleared. + If select_related(None) is called, clear the list. """ if self._fields is not None: @@ -866,13 +849,12 @@ class QuerySet: def prefetch_related(self, *lookups): """ - Returns a new QuerySet instance that will prefetch the specified + Return a new QuerySet instance that will prefetch the specified Many-To-One and Many-To-Many related objects when the QuerySet is evaluated. - When prefetch_related() is called more than once, the list of lookups to - prefetch is appended to. If prefetch_related(None) is called, the list - is cleared. + When prefetch_related() is called more than once, append to the list of + prefetch lookups. If prefetch_related(None) is called, clear the list. """ clone = self._clone() if lookups == (None,): @@ -924,9 +906,7 @@ class QuerySet: return clone def order_by(self, *field_names): - """ - Returns a new QuerySet instance with the ordering changed. - """ + """Return a new QuerySet instance with the ordering changed.""" assert self.query.can_filter(), \ "Cannot reorder a query once a slice has been taken." obj = self._clone() @@ -936,7 +916,7 @@ class QuerySet: def distinct(self, *field_names): """ - Returns a new QuerySet instance that will select only distinct results. + Return a new QuerySet instance that will select only distinct results. 
         """
         assert self.query.can_filter(), \
             "Cannot create distinct fields once a slice has been taken."
@@ -946,9 +926,7 @@ class QuerySet:
 
     def extra(self, select=None, where=None, params=None, tables=None,
               order_by=None, select_params=None):
-        """
-        Adds extra SQL fragments to the query.
-        """
+        """Add extra SQL fragments to the query."""
         assert self.query.can_filter(), \
             "Cannot change a query once a slice has been taken"
         clone = self._clone()
@@ -956,20 +934,17 @@ class QuerySet:
         return clone
 
     def reverse(self):
-        """
-        Reverses the ordering of the QuerySet.
-        """
+        """Reverse the ordering of the QuerySet."""
         clone = self._clone()
         clone.query.standard_ordering = not clone.query.standard_ordering
         return clone
 
     def defer(self, *fields):
         """
-        Defers the loading of data for certain fields until they are accessed.
-        The set of fields to defer is added to any existing set of deferred
-        fields. The only exception to this is if None is passed in as the only
-        parameter, in which case all deferrals are removed (None acts as a
-        reset option).
+        Defer the loading of data for certain fields until they are accessed.
+        Add the set of deferred fields to any existing set of deferred fields.
+        The only exception to this is if None is passed in as the only
+        parameter, in which case remove all deferrals.
         """
         if self._fields is not None:
             raise TypeError("Cannot call defer() after .values() or .values_list()")
@@ -982,7 +957,7 @@ class QuerySet:
 
     def only(self, *fields):
         """
-        Essentially, the opposite of defer. Only the fields passed into this
+        Essentially, the opposite of defer(). Only the fields passed into this
         method and that are not already specified as deferred are loaded
         immediately when the queryset is evaluated.
         """
@@ -997,9 +972,7 @@ class QuerySet:
         return clone
 
     def using(self, alias):
-        """
-        Selects which database this QuerySet should execute its query against.
- """ + """Select which database this QuerySet should execute against.""" clone = self._clone() clone._db = alias return clone @@ -1011,7 +984,7 @@ class QuerySet: @property def ordered(self): """ - Returns True if the QuerySet is ordered -- i.e. has an order_by() + Return True if the QuerySet is ordered -- i.e. has an order_by() clause or a default ordering on the model. """ if self.query.extra_order_by or self.query.order_by: @@ -1023,7 +996,7 @@ class QuerySet: @property def db(self): - "Return the database that will be used if this query is executed now" + """Return the database used if this query is executed now.""" if self._for_write: return self._db or router.db_for_write(self.model, **self._hints) return self._db or router.db_for_read(self.model, **self._hints) @@ -1034,7 +1007,7 @@ class QuerySet: def _insert(self, objs, fields, return_id=False, raw=False, using=None): """ - Inserts a new record for the given model. This provides an interface to + Insert a new record for the given model. This provides an interface to the InsertQuery class and is how Model.save() is implemented. """ self._for_write = True @@ -1048,9 +1021,9 @@ class QuerySet: def _batched_insert(self, objs, fields, batch_size): """ - A little helper method for bulk_insert to insert the bulk one batch - at a time. Inserts recursively a batch from the front of the bulk and - then _batched_insert() the remaining objects again. + A helper method for bulk_create() to insert the bulk one batch at a + time. Insert recursively a batch from the front of the bulk and then + _batched_insert() the remaining objects again. """ if not objs: return @@ -1090,7 +1063,7 @@ class QuerySet: def _next_is_sticky(self): """ - Indicates that the next filter call and the one following that should + Indicate that the next filter call and the one following that should be treated as a single filter. This is only important when it comes to determining when to reuse tables for many-to-many filters. 
Required so that we can filter naturally on the results of related managers. @@ -1103,9 +1076,7 @@ class QuerySet: return self def _merge_sanity_check(self, other): - """ - Checks that we are merging two comparable QuerySet classes. - """ + """Check that two QuerySet classes may be merged.""" if self._fields is not None and ( set(self.query.values_select) != set(other.query.values_select) or set(self.query.extra_select) != set(other.query.extra_select) or @@ -1136,17 +1107,16 @@ class QuerySet: def _add_hints(self, **hints): """ - Update hinting information for later use by Routers + Update hinting information for use by routers. Add new key/values or + overwrite existing key/values. """ - # If there is any hinting information, add it to what we already know. - # If we have a new hint for an existing key, overwrite with the new value. self._hints.update(hints) def _has_filters(self): """ - Checks if this QuerySet has any filtering going on. Note that this - isn't equivalent for checking if all objects are present in results, - for example qs[1:]._has_filters() -> False. + Check if this QuerySet has any filtering going on. This isn't + equivalent with checking if all objects are present in results, for + example, qs[1:]._has_filters() -> False. """ return self.query.has_filters() @@ -1158,7 +1128,7 @@ class InstanceCheckMeta(type): class EmptyQuerySet(metaclass=InstanceCheckMeta): """ - Marker class usable for checking if a queryset is empty by .none(): + Marker class to checking if a queryset is empty by .none(): isinstance(qs.none(), EmptyQuerySet) -> True """ @@ -1168,7 +1138,7 @@ class EmptyQuerySet(metaclass=InstanceCheckMeta): class RawQuerySet: """ - Provides an iterator which converts the results of raw SQL queries into + Provide an iterator which converts the results of raw SQL queries into annotated model instances. 
""" def __init__(self, raw_query, model=None, query=None, params=None, @@ -1182,9 +1152,7 @@ class RawQuerySet: self.translations = translations or {} def resolve_model_init_order(self): - """ - Resolve the init field names and value positions - """ + """Resolve the init field names and value positions.""" model_init_fields = [f for f in self.model._meta.fields if f.column in self.columns] annotation_fields = [(column, pos) for pos, column in enumerate(self.columns) if column not in self.model_fields] @@ -1240,13 +1208,11 @@ class RawQuerySet: @property def db(self): - "Return the database that will be used if this query is executed now" + """Return the database used if this query is executed now.""" return self._db or router.db_for_read(self.model, **self._hints) def using(self, alias): - """ - Selects which database this Raw QuerySet should execute its query against. - """ + """Select the database this RawQuerySet should execute against.""" return RawQuerySet( self.raw_query, model=self.model, query=self.query.clone(using=alias), @@ -1273,9 +1239,7 @@ class RawQuerySet: @cached_property def model_fields(self): - """ - A dict mapping column names to model field names. - """ + """A dict mapping column names to model field names.""" converter = connections[self.db].introspection.table_name_converter model_fields = {} for field in self.model._meta.fields: @@ -1336,9 +1300,7 @@ class Prefetch: def normalize_prefetch_lookups(lookups, prefix=None): - """ - Helper function that normalize lookups into Prefetch objects. - """ + """Normalize lookups into Prefetch objects.""" ret = [] for lookup in lookups: if not isinstance(lookup, Prefetch): @@ -1474,9 +1436,9 @@ def prefetch_related_objects(model_instances, *related_lookups): def get_prefetcher(instance, through_attr, to_attr): """ - For the attribute 'through_attr' on the given instance, finds + For the attribute 'through_attr' on the given instance, find an object that has a get_prefetch_queryset(). 
- Returns a 4 tuple containing: + Return a 4 tuple containing: (the object with get_prefetch_queryset (or None), the descriptor object representing this relationship (or None), a boolean that is False if the attribute was not found at all, @@ -1521,14 +1483,13 @@ def get_prefetcher(instance, through_attr, to_attr): def prefetch_one_level(instances, prefetcher, lookup, level): """ - Helper function for prefetch_related_objects + Helper function for prefetch_related_objects(). - Runs prefetches on all instances using the prefetcher object, + Run prefetches on all instances using the prefetcher object, assigning results to relevant caches in instance. - The prefetched objects are returned, along with any additional - prefetches that must be done due to prefetch_related lookups - found from default managers. + Return the prefetched objects along with any additional prefetches that + must be done due to prefetch_related lookups found from default managers. """ # prefetcher must have a method get_prefetch_queryset() which takes a list # of instances, and returns a tuple: diff --git a/django/db/models/query_utils.py b/django/db/models/query_utils.py index fe1d8f9f69..27430ae4ca 100644 --- a/django/db/models/query_utils.py +++ b/django/db/models/query_utils.py @@ -19,9 +19,7 @@ PathInfo = namedtuple('PathInfo', 'from_opts to_opts target_fields join_field m2 class InvalidQuery(Exception): - """ - The query passed to raw isn't a safe query to use with raw. - """ + """The query passed to raw() isn't a safe query to use with raw().""" pass @@ -47,7 +45,7 @@ class QueryWrapper: class Q(tree.Node): """ - Encapsulates filters as objects that can then be combined logically (using + Encapsulate filters as objects that can then be combined logically (using `&` and `|`). """ # Connection types @@ -112,8 +110,8 @@ class DeferredAttribute: def __get__(self, instance, cls=None): """ - Retrieves and caches the value from the datastore on the first lookup. - Returns the cached value. 
+        Retrieve and cache the value from the datastore on the first lookup.
+        Return the cached value.
         """
         if instance is None:
             return self
@@ -211,7 +209,7 @@ class RegisterLookupMixin:
 
 def select_related_descend(field, restricted, requested, load_fields, reverse=False):
     """
-    Returns True if this field should be used to descend deeper for
+    Return True if this field should be used to descend deeper for
     select_related() purposes. Used by both the query construction code
     (sql.query.fill_related_selections()) and the model instance creation
     code (query.get_klass_info()).
@@ -247,10 +245,9 @@ def select_related_descend(field, restricted, requested, load_fields, reverse=Fa
 
 def refs_expression(lookup_parts, annotations):
     """
-    A helper method to check if the lookup_parts contains references
-    to the given annotations set. Because the LOOKUP_SEP is contained in the
-    default annotation names we must check each prefix of the lookup_parts
-    for a match.
+    Check if the lookup_parts contains references to the given annotations set.
+    Because the LOOKUP_SEP is contained in the default annotation names, check
+    each prefix of the lookup_parts for a match.
     """
     for n in range(len(lookup_parts) + 1):
         level_n_lookup = LOOKUP_SEP.join(lookup_parts[0:n])
diff --git a/django/db/models/sql/compiler.py b/django/db/models/sql/compiler.py
index 76a595d2b1..3215aaaa9c 100644
--- a/django/db/models/sql/compiler.py
+++ b/django/db/models/sql/compiler.py
@@ -38,7 +38,7 @@ class SQLCompiler:
 
     def pre_sql_setup(self):
         """
-        Does any necessary class setup immediately prior to producing SQL. This
+        Do any necessary class setup immediately prior to producing SQL. This
         is for things that can't necessarily be done in __init__ because we
         might not have all the pieces in place at that time.
         """
@@ -51,7 +51,7 @@ class SQLCompiler:
 
     def get_group_by(self, select, order_by):
         """
-        Returns a list of 2-tuples of form (sql, params).
+        Return a list of 2-tuples of form (sql, params).
The logic of what exactly the GROUP BY clause contains is hard to describe in other words than "if it passes the test suite, @@ -163,7 +163,7 @@ class SQLCompiler: def get_select(self): """ - Returns three values: + Return three values: - a list of 3-tuples of (expression, (sql, params), alias) - a klass_info structure, - a dictionary of annotations @@ -232,7 +232,7 @@ class SQLCompiler: def get_order_by(self): """ - Returns a list of 2-tuples of form (expr, (sql, params, is_ref)) for the + Return a list of 2-tuples of form (expr, (sql, params, is_ref)) for the ORDER BY clause. The order_by clause can alter the select clause (for example it @@ -400,7 +400,7 @@ class SQLCompiler: def as_sql(self, with_limits=True, with_col_aliases=False): """ - Creates the SQL for this query. Returns the SQL string and list of + Create the SQL for this query. Return the SQL string and list of parameters. If 'with_limits' is False, any limit/offset information is not included @@ -513,14 +513,14 @@ class SQLCompiler: def get_default_columns(self, start_alias=None, opts=None, from_parent=None): """ - Computes the default columns for selecting every field in the base + Compute the default columns for selecting every field in the base model. Will sometimes be called to pull in related models (e.g. via select_related), in which case "opts" and "start_alias" will be given to provide a starting point for the traversal. - Returns a list of strings, quoted appropriately for use in SQL + Return a list of strings, quoted appropriately for use in SQL directly, as well as a set of aliases used in the select statement (if - 'as_pairs' is True, returns a list of (alias, col_name) pairs instead + 'as_pairs' is True, return a list of (alias, col_name) pairs instead of strings as the first component and None as the second component). """ result = [] @@ -558,10 +558,10 @@ class SQLCompiler: def get_distinct(self): """ - Returns a quoted list of fields to use in DISTINCT ON part of the query. 
+ Return a quoted list of fields to use in DISTINCT ON part of the query. - Note that this method can alter the tables in the query, and thus it - must be called before get_from_clause(). + This method can alter the tables in the query, and thus it must be + called before get_from_clause(). """ qn = self.quote_name_unless_alias qn2 = self.connection.ops.quote_name @@ -582,7 +582,7 @@ class SQLCompiler: def find_ordering_name(self, name, opts, alias=None, default_order='ASC', already_seen=None): """ - Returns the table alias (the name might be ambiguous, the alias will + Return the table alias (the name might be ambiguous, the alias will not be) and column name for ordering by the given 'name' parameter. The 'name' is of the form 'field1__field2__...__fieldN'. """ @@ -613,11 +613,11 @@ class SQLCompiler: def _setup_joins(self, pieces, opts, alias): """ - A helper method for get_order_by and get_distinct. + Helper method for get_order_by() and get_distinct(). - Note that get_ordering and get_distinct must produce same target - columns on same input, as the prefixes of get_ordering and get_distinct - must match. Executing SQL where this is not true is an error. + get_ordering() and get_distinct() must produce same target columns on + same input, as the prefixes of get_ordering() and get_distinct() must + match. Executing SQL where this is not true is an error. """ if not alias: alias = self.query.get_initial_alias() @@ -628,14 +628,14 @@ class SQLCompiler: def get_from_clause(self): """ - Returns a list of strings that are joined together to go after the + Return a list of strings that are joined together to go after the "FROM" part of the query, as well as a list any extra parameters that - need to be included. Sub-classes, can override this to create a + need to be included. Subclasses, can override this to create a from-clause via a "select". This should only be called after any SQL construction methods that - might change the tables we need. 
This means the select columns, - ordering and distinct must be done first. + might change the tables that are needed. This means the select columns, + ordering, and distinct must be done first. """ result = [] params = [] @@ -792,8 +792,8 @@ class SQLCompiler: def deferred_to_columns(self): """ - Converts the self.deferred_loading data structure to mapping of table - names to sets of column names which are to be loaded. Returns the + Convert the self.deferred_loading data structure to mapping of table + names to sets of column names which are to be loaded. Return the dictionary. """ columns = {} @@ -820,9 +820,7 @@ class SQLCompiler: return tuple(row) def results_iter(self, results=None): - """ - Returns an iterator over the results from executing this query. - """ + """Return an iterator over the results from executing this query.""" if results is None: results = self.execute_sql(MULTI) fields = [s[0] for s in self.select[0:self.col_count]] @@ -845,7 +843,7 @@ class SQLCompiler: def execute_sql(self, result_type=MULTI, chunked_fetch=False): """ - Run the query against the database and returns the result(s). The + Run the query against the database and return the result(s). The return value is a single data item if result_type is SINGLE, or an iterator over the results if the result_type is MULTI. @@ -933,10 +931,10 @@ class SQLInsertCompiler(SQLCompiler): def field_as_sql(self, field, val): """ Take a field and a value intended to be saved on that field, and - return placeholder SQL and accompanying params. Checks for raw values, - expressions and fields with get_placeholder() defined in that order. + return placeholder SQL and accompanying params. Check for raw values, + expressions, and fields with get_placeholder() defined in that order. - When field is None, the value is considered raw and is used as the + When field is None, consider the value raw and use it as the placeholder, with no corresponding parameters returned. 
""" if field is None: @@ -994,9 +992,9 @@ class SQLInsertCompiler(SQLCompiler): def assemble_as_sql(self, fields, value_rows): """ - Take a sequence of N fields and a sequence of M rows of values, - generate placeholder SQL and parameters for each field and value, and - return a pair containing: + Take a sequence of N fields and a sequence of M rows of values, and + generate placeholder SQL and parameters for each field and value. + Return a pair containing: * a sequence of M rows of N SQL placeholder strings, and * a sequence of M rows of corresponding parameter values. @@ -1105,7 +1103,7 @@ class SQLInsertCompiler(SQLCompiler): class SQLDeleteCompiler(SQLCompiler): def as_sql(self): """ - Creates the SQL for this query. Returns the SQL string and list of + Create the SQL for this query. Return the SQL string and list of parameters. """ assert len([t for t in self.query.tables if self.query.alias_refcount[t] > 0]) == 1, \ @@ -1121,7 +1119,7 @@ class SQLDeleteCompiler(SQLCompiler): class SQLUpdateCompiler(SQLCompiler): def as_sql(self): """ - Creates the SQL for this query. Returns the SQL string and list of + Create the SQL for this query. Return the SQL string and list of parameters. """ self.pre_sql_setup() @@ -1176,7 +1174,7 @@ class SQLUpdateCompiler(SQLCompiler): def execute_sql(self, result_type): """ - Execute the specified update. Returns the number of rows affected by + Execute the specified update. Return the number of rows affected by the primary update query. The "primary update query" is the first non-empty query that is executed. Row counts for any subsequent, related queries are not available. @@ -1197,13 +1195,12 @@ class SQLUpdateCompiler(SQLCompiler): def pre_sql_setup(self): """ - If the update depends on results from other tables, we need to do some - munging of the "where" conditions to match the format required for - (portable) SQL updates. That is done here. 
+ If the update depends on results from other tables, munge the "where" + conditions to match the format required for (portable) SQL updates. - Further, if we are going to be running multiple updates, we pull out - the id values to update at this point so that they don't change as a - result of the progressive updates. + If multiple updates are required, pull out the id values to update at + this point so that they don't change as a result of the progressive + updates. """ refcounts_before = self.query.alias_refcount.copy() # Ensure base table is in the query @@ -1242,7 +1239,7 @@ class SQLUpdateCompiler(SQLCompiler): class SQLAggregateCompiler(SQLCompiler): def as_sql(self): """ - Creates the SQL for this query. Returns the SQL string and list of + Create the SQL for this query. Return the SQL string and list of parameters. """ sql, params = [], [] @@ -1261,7 +1258,7 @@ class SQLAggregateCompiler(SQLCompiler): def cursor_iter(cursor, sentinel, col_count): """ - Yields blocks of rows from a cursor and ensures the cursor is closed when + Yield blocks of rows from a cursor and ensure the cursor is closed when done. """ try: diff --git a/django/db/models/sql/datastructures.py b/django/db/models/sql/datastructures.py index 81b6cabdc6..788c2dd669 100644 --- a/django/db/models/sql/datastructures.py +++ b/django/db/models/sql/datastructures.py @@ -59,7 +59,7 @@ class Join: def as_sql(self, compiler, connection): """ - Generates the full + Generate the full LEFT OUTER JOIN sometable ON sometable.somecol = othertable.othercol, params clause for this join. 
""" diff --git a/django/db/models/sql/query.py b/django/db/models/sql/query.py index 9040c5a5a4..7af04e7d77 100644 --- a/django/db/models/sql/query.py +++ b/django/db/models/sql/query.py @@ -44,9 +44,7 @@ def get_field_names_from_opts(opts): class RawQuery: - """ - A single raw SQL query - """ + """A single raw SQL query.""" def __init__(self, sql, using, params=None, context=None): self.params = params or () @@ -112,9 +110,7 @@ class RawQuery: class Query: - """ - A single SQL query. - """ + """A single SQL query.""" alias_prefix = 'T' subq_aliases = frozenset([alias_prefix]) @@ -221,7 +217,7 @@ class Query: def __str__(self): """ - Returns the query as a string of SQL with the parameter values + Return the query as a string of SQL with the parameter values substituted in (use sql_with_params() to see the unsubstituted string). Parameter values won't necessarily be quoted correctly, since that is @@ -232,7 +228,7 @@ class Query: def sql_with_params(self): """ - Returns the query as an SQL string and the parameters that will be + Return the query as an SQL string and the parameters that will be substituted into the query. """ return self.get_compiler(DEFAULT_DB_ALIAS).as_sql() @@ -254,7 +250,7 @@ class Query: def get_meta(self): """ - Returns the Options instance (the model._meta) from which to start + Return the Options instance (the model._meta) from which to start processing. Normally, this is self.model._meta, but it can be changed by subclasses. """ @@ -262,7 +258,7 @@ class Query: def clone(self, klass=None, memo=None, **kwargs): """ - Creates a copy of the current instance. The 'kwargs' parameter can be + Create a copy of the current instance. The 'kwargs' parameter can be used by clients to update attributes after copying has taken place. """ obj = Empty() @@ -395,7 +391,7 @@ class Query: def get_aggregation(self, using, added_aggregate_names): """ - Returns the dictionary with the values of the existing aggregations. 
+ Return the dictionary with the values of the existing aggregations. """ if not self.annotation_select: return {} @@ -488,7 +484,7 @@ class Query: def get_count(self, using): """ - Performs a COUNT() query using the current filter constraints. + Perform a COUNT() query using the current filter constraints. """ obj = self.clone() obj.add_annotation(Count('*'), alias='__count', is_summary=True) @@ -613,7 +609,7 @@ class Query: def deferred_to_data(self, target, callback): """ - Converts the self.deferred_loading data structure to an alternate data + Convert the self.deferred_loading data structure to an alternate data structure, describing the field that *will* be loaded. This is used to compute the columns to select from the database and also by the QuerySet class to work out which fields are being initialized on each @@ -699,7 +695,7 @@ class Query: def table_alias(self, table_name, create=False): """ - Returns a table alias for the given table_name and whether this is a + Return a table alias for the given table_name and whether this is a new alias or not. If 'create' is true, a new alias is always created. Otherwise, the @@ -724,17 +720,17 @@ class Query: return alias, True def ref_alias(self, alias): - """ Increases the reference count for this alias. """ + """Increases the reference count for this alias.""" self.alias_refcount[alias] += 1 def unref_alias(self, alias, amount=1): - """ Decreases the reference count for this alias. """ + """Decreases the reference count for this alias.""" self.alias_refcount[alias] -= amount def promote_joins(self, aliases): """ - Promotes recursively the join type of given aliases and its children to - an outer join. If 'unconditional' is False, the join is only promoted if + Promote recursively the join type of given aliases and its children to + an outer join. If 'unconditional' is False, only promote the join if it is nullable or the parent join is an outer join. 
The children promotion is done to avoid join chains that contain a LOUTER @@ -786,8 +782,8 @@ class Query: def reset_refcounts(self, to_counts): """ - This method will reset reference counts for aliases so that they match - the value passed in :param to_counts:. + Reset reference counts for aliases so that they match the value passed + in `to_counts`. """ for alias, cur_refcount in self.alias_refcount.copy().items(): unref_amount = cur_refcount - to_counts.get(alias, 0) @@ -795,7 +791,7 @@ class Query: def change_aliases(self, change_map): """ - Changes the aliases in change_map (which maps old-alias -> new-alias), + Change the aliases in change_map (which maps old-alias -> new-alias), relabelling any references to them in select columns and the where clause. """ @@ -831,14 +827,14 @@ class Query: def bump_prefix(self, outer_query): """ - Changes the alias prefix to the next letter in the alphabet in a way + Change the alias prefix to the next letter in the alphabet in a way that the outer query's aliases and this query's aliases will not conflict. Even tables that previously had no alias will get an alias after this call. """ def prefix_gen(): """ - Generates a sequence of characters in alphabetical order: + Generate a sequence of characters in alphabetical order: -> 'A', 'B', 'C', ... When the alphabet is finished, the sequence will continue with the @@ -878,7 +874,7 @@ class Query: def get_initial_alias(self): """ - Returns the first alias for this query, after increasing its reference + Return the first alias for this query, after increasing its reference count. """ if self.tables: @@ -890,15 +886,15 @@ class Query: def count_active_tables(self): """ - Returns the number of tables in this query with a non-zero reference - count. Note that after execution, the reference counts are zeroed, so - tables added in compiler will not be seen by this method. + Return the number of tables in this query with a non-zero reference + count. 
After execution, the reference counts are zeroed, so tables + added in compiler will not be seen by this method. """ return len([1 for count in self.alias_refcount.values() if count]) def join(self, join, reuse=None): """ - Returns an alias for the join in 'connection', either reusing an + Return an alias for the join in 'connection', either reusing an existing alias for that join or creating a new one. 'connection' is a tuple (lhs, table, join_cols) where 'lhs' is either an existing table alias or a table name. 'join_cols' is a tuple of tuples containing @@ -940,7 +936,7 @@ class Query: def join_parent_model(self, opts, model, alias, seen): """ - Makes sure the given 'model' is joined in the query. If 'model' isn't + Make sure the given 'model' is joined in the query. If 'model' isn't a parent of 'opts' or if it is None this method is a no-op. The 'alias' is the root alias for starting the join, 'seen' is a dict @@ -973,9 +969,7 @@ class Query: return alias or seen[None] def add_annotation(self, annotation, alias, is_summary=False): - """ - Adds a single annotation expression to the Query - """ + """Add a single annotation expression to the Query.""" annotation = annotation.resolve_expression(self, allow_joins=True, reuse=None, summarize=is_summary) self.append_annotation_mask([alias]) @@ -1031,7 +1025,7 @@ class Query: def solve_lookup_type(self, lookup): """ - Solve the lookup type from the lookup (eg: 'foobar__id__icontains') + Solve the lookup type from the lookup (e.g.: 'foobar__id__icontains'). """ lookup_splitted = lookup.split(LOOKUP_SEP) if self._annotations: @@ -1051,8 +1045,8 @@ class Query: def check_query_object_type(self, value, opts, field): """ - Checks whether the object passed while querying is of the correct type. - If not, it raises a ValueError specifying the wrong object. + Check whether the object passed while querying is of the correct type. + If not, raise a ValueError specifying the wrong object. 
""" if hasattr(value, '_meta'): if not check_rel_lookup_compatibility(value._meta.model, opts, field): @@ -1061,9 +1055,7 @@ class Query: (value, opts.object_name)) def check_related_objects(self, field, value, opts): - """ - Checks the type of object passed to query relations. - """ + """Check the type of object passed to query relations.""" if field.is_relation: # Check that the field and the queryset use the same model in a # query like .filter(author=Author.objects.all()). For example, the @@ -1087,7 +1079,7 @@ class Query: def build_lookup(self, lookups, lhs, rhs): """ - Tries to extract transforms and lookup from given lhs. + Try to extract transforms and lookup from given lhs. The lhs value is something that works like SQLExpression. The rhs value is what the lookup is going to compare against. @@ -1114,7 +1106,7 @@ class Query: def try_transform(self, lhs, name, rest_of_lookups): """ - Helper method for build_lookup. Tries to fetch and initialize + Helper method for build_lookup(). Try to fetch and initialize a transform for name parameter from lhs. """ transform_class = lhs.get_transform(name) @@ -1129,7 +1121,7 @@ class Query: def build_filter(self, filter_expr, branch_negated=False, current_negated=False, can_reuse=None, connector=AND, allow_joins=True, split_subq=True): """ - Builds a WhereNode for a single filter clause, but doesn't add it + Build a WhereNode for a single filter clause but don't add it to this Query. Query.add_q() will then add this filter to the where Node. @@ -1140,7 +1132,7 @@ class Query: negated or not and this will be used to determine if IS NULL filtering is needed. - The difference between current_netageted and branch_negated is that + The difference between current_negated and branch_negated is that branch_negated is set on first negation, but current_negated is flipped for each negation. 
@@ -1263,9 +1255,7 @@ class Query: def _add_q(self, q_object, used_aliases, branch_negated=False, current_negated=False, allow_joins=True, split_subq=True): - """ - Adds a Q-object to the current filter. - """ + """Add a Q-object to the current filter.""" connector = q_object.connector current_negated = current_negated ^ q_object.negated branch_negated = branch_negated or q_object.negated @@ -1292,20 +1282,18 @@ class Query: def names_to_path(self, names, opts, allow_many=True, fail_on_missing=False): """ - Walks the list of names and turns them into PathInfo tuples. Note that - a single name in 'names' can generate multiple PathInfos (m2m for - example). + Walk the list of names and turns them into PathInfo tuples. A single + name in 'names' can generate multiple PathInfos (m2m, for example). 'names' is the path of names to travel, 'opts' is the model Options we start the name resolving from, 'allow_many' is as for setup_joins(). If fail_on_missing is set to True, then a name that can't be resolved will generate a FieldError. - Returns a list of PathInfo tuples. In addition returns the final field - (the last used join field), and target (which is a field guaranteed to - contain the same value as the final field). Finally, the method returns - those names that weren't found (which are likely transforms and the - final lookup). + Return a list of PathInfo tuples. In addition return the final field + (the last used join field) and target (which is a field guaranteed to + contain the same value as the final field). Finally, return those names + that weren't found (which are likely transforms and the final lookup). """ path, names_with_path = [], [] for pos, name in enumerate(names): @@ -1397,7 +1385,7 @@ class Query: If 'allow_many' is False, then any reverse foreign key seen will generate a MultiJoin exception. 
- Returns the final field involved in the joins, the target field (used + Return the final field involved in the joins, the target field (used for any 'where' constraint), the final 'opts' value, the joins and the field path travelled to generate the joins. @@ -1433,13 +1421,12 @@ class Query: is the full list of join aliases. The 'path' contain the PathInfos used to create the joins. - Returns the final target field and table alias and the new active + Return the final target field and table alias and the new active joins. - We will always trim any direct join if we have the target column - available already in the previous table. Reverse joins can't be - trimmed as we don't know if there is anything on the other side of - the join. + Always trim any direct join if the target column is already in the + previous table. Can't trim reverse joins as it's unknown if there's + anything on the other side of the join. """ joins = joins[:] for pos, info in enumerate(reversed(path)): @@ -1555,13 +1542,12 @@ class Query: def set_limits(self, low=None, high=None): """ - Adjusts the limits on the rows retrieved. We use low/high to set these, + Adjust the limits on the rows retrieved. Use low/high to set these, as it makes it more Pythonic to read and write. When the SQL query is - created, they are converted to the appropriate offset and limit values. + created, convert them to the appropriate offset and limit values. - Any limits passed in here are applied relative to the existing - constraints. So low is added to the current low value and both will be - clamped to any existing high value. + Apply any limits passed in here to the existing constraints. Add low + to the current low value and clamp both to any existing high value. """ if high is not None: if self.high_mark is not None: @@ -1578,23 +1564,19 @@ class Query: self.set_empty() def clear_limits(self): - """ - Clears any existing limits. 
- """ + """Clear any existing limits.""" self.low_mark, self.high_mark = 0, None def can_filter(self): """ - Returns True if adding filters to this instance is still possible. + Return True if adding filters to this instance is still possible. Typically, this means no limits or offsets have been put on the results. """ return not self.low_mark and self.high_mark is None def clear_select_clause(self): - """ - Removes all fields from SELECT clause. - """ + """Remove all fields from SELECT clause.""" self.select = [] self.default_cols = False self.select_related = False @@ -1603,7 +1585,7 @@ class Query: def clear_select_fields(self): """ - Clears the list of fields to select (but not extra_select columns). + Clear the list of fields to select (but not extra_select columns). Some queryset types completely replace any existing list of select columns. """ @@ -1620,15 +1602,15 @@ class Query: def add_distinct_fields(self, *field_names): """ - Adds and resolves the given fields to the query's "distinct on" clause. + Add and resolve the given fields to the query's "distinct on" clause. """ self.distinct_fields = field_names self.distinct = True def add_fields(self, field_names, allow_m2m=True): """ - Adds the given (model) fields to the select set. The field names are - added in the order specified. + Add the given (model) fields to the select set. Add the field names in + the order specified. """ alias = self.get_initial_alias() opts = self.get_meta() @@ -1656,12 +1638,12 @@ class Query: def add_ordering(self, *ordering): """ - Adds items from the 'ordering' sequence to the query's "order by" + Add items from the 'ordering' sequence to the query's "order by" clause. These items are either field names (not column names) -- possibly with a direction prefix ('-' or '?') -- or OrderBy expressions. - If 'ordering' is empty, all ordering is cleared from the query. + If 'ordering' is empty, clear all ordering from the query. 
""" errors = [] for item in ordering: @@ -1681,7 +1663,7 @@ class Query: def clear_ordering(self, force_empty): """ - Removes any ordering settings. If 'force_empty' is True, there will be + Remove any ordering settings. If 'force_empty' is True, there will be no ordering in the resulting query (not even the model's default). """ self.order_by = [] @@ -1691,7 +1673,7 @@ class Query: def set_group_by(self): """ - Expands the GROUP BY clause required by the query. + Expand the GROUP BY clause required by the query. This will usually be the set of all non-aggregate fields in the return data. If the database backend supports grouping by the @@ -1710,7 +1692,7 @@ class Query: def add_select_related(self, fields): """ - Sets up the select_related data structure so that we only select + Set up the select_related data structure so that we only select certain related models (as opposed to all models, when self.select_related=True). """ @@ -1726,7 +1708,7 @@ class Query: def add_extra(self, select, select_params, where, params, tables, order_by): """ - Adds data to the various extra_* attributes for user-created additions + Add data to the various extra_* attributes for user-created additions to the query. """ if select: @@ -1758,16 +1740,14 @@ class Query: self.extra_order_by = order_by def clear_deferred_loading(self): - """ - Remove any fields from the deferred loading set. - """ + """Remove any fields from the deferred loading set.""" self.deferred_loading = (set(), True) def add_deferred_loading(self, field_names): """ Add the given list of model field names to the set of fields to exclude from loading from the database when automatic column selection - is done. The new field names are added to any existing field names that + is done. Add the new field names to any existing field names that are deferred (or removed from any existing field names that are marked as the only ones for immediate loading). 
""" @@ -1788,8 +1768,8 @@ class Query: Add the given list of model field names to the set of fields to retrieve when the SQL is executed ("immediate loading" fields). The field names replace any existing immediate loading field names. If - there are field names already specified for deferred loading, those - names are removed from the new field_names before storing the new names + there are field names already specified for deferred loading, remove + those names from the new field_names before storing the new names for immediate loading. (That is, immediate loading overrides any existing immediate values, but respects existing deferrals.) """ @@ -1809,12 +1789,12 @@ class Query: def get_loaded_field_names(self): """ - If any fields are marked to be deferred, returns a dictionary mapping + If any fields are marked to be deferred, return a dictionary mapping models to a set of names in those fields that will be loaded. If a model is not in the returned dictionary, none of its fields are deferred. - If no fields are marked for deferral, returns an empty dictionary. + If no fields are marked for deferral, return an empty dictionary. """ # We cache this because we call this function multiple times # (compiler.fill_related_selections, query.iterator) @@ -1827,13 +1807,11 @@ class Query: return collection def get_loaded_field_names_cb(self, target, model, fields): - """ - Callback used by get_deferred_field_names(). 
- """ + """Callback used by get_deferred_field_names().""" target[model] = {f.attname for f in fields} def set_annotation_mask(self, names): - "Set the mask of annotations that will actually be returned by the SELECT" + """Set the mask of annotations that will be returned by the SELECT.""" if names is None: self.annotation_select_mask = None else: @@ -1846,9 +1824,8 @@ class Query: def set_extra_mask(self, names): """ - Set the mask of extra select items that will be returned by SELECT, - we don't actually remove them from the Query since they might be used - later + Set the mask of extra select items that will be returned by SELECT. + Don't remove them from the Query since they might be used later. """ if names is None: self.extra_select_mask = None @@ -1893,10 +1870,9 @@ class Query: @property def annotation_select(self): - """The OrderedDict of aggregate columns that are not masked, and should - be used in the SELECT clause. - - This result is cached for optimization purposes. + """ + Return the OrderedDict of aggregate columns that are not masked and + should be used in the SELECT clause. Cache this result for performance. """ if self._annotation_select_cache is not None: return self._annotation_select_cache @@ -1928,16 +1904,16 @@ class Query: def trim_start(self, names_with_path): """ - Trims joins from the start of the join path. The candidates for trim + Trim joins from the start of the join path. The candidates for trim are the PathInfos in names_with_path structure that are m2m joins. - Also sets the select column so the start matches the join. + Also set the select column so the start matches the join. This method is meant to be used for generating the subquery joins & cols in split_exclude(). - Returns a lookup usable for doing outerq.filter(lookup=self). Returns - also if the joins in the prefix contain a LEFT OUTER join. 
+ Return a lookup usable for doing outerq.filter(lookup=self) and a + boolean indicating if the joins in the prefix contain a LEFT OUTER join. _""" all_paths = [] for _, paths in names_with_path: @@ -1995,7 +1971,7 @@ class Query: def is_nullable(self, field): """ - A helper to check if the given field should be treated as nullable. + Check if the given field should be treated as nullable. Some backends treat '' as null and Django treats such fields as nullable for those backends. In such situations field.null can be @@ -2025,7 +2001,7 @@ class Query: def get_order_dir(field, default='ASC'): """ - Returns the field name and direction for an order specification. For + Return the field name and direction for an order specification. For example, '-foo' is returned as ('foo', 'DESC'). The 'default' param is used to indicate which way no prefix (or a '+' @@ -2039,8 +2015,8 @@ def get_order_dir(field, default='ASC'): def add_to_dict(data, key, value): """ - A helper function to add "value" to the set of values for "key", whether or - not "key" already exists. + Add "value" to the set of values for "key", whether or not "key" already + exists. """ if key in data: data[key].add(value) @@ -2050,8 +2026,8 @@ def add_to_dict(data, key, value): def is_reverse_o2o(field): """ - A little helper to check if the given field is reverse-o2o. The field is - expected to be some sort of relation field or related object. + Check if the given field is reverse-o2o. The field is expected to be some + sort of relation field or related object. 
""" return field.is_relation and field.one_to_one and not field.concrete diff --git a/django/db/models/sql/subqueries.py b/django/db/models/sql/subqueries.py index 03a5155b9b..65e944baa9 100644 --- a/django/db/models/sql/subqueries.py +++ b/django/db/models/sql/subqueries.py @@ -14,10 +14,7 @@ __all__ = ['DeleteQuery', 'UpdateQuery', 'InsertQuery', 'AggregateQuery'] class DeleteQuery(Query): - """ - Delete queries are done through this class, since they are more constrained - than general queries. - """ + """A DELETE SQL query.""" compiler = 'SQLDeleteCompiler' @@ -81,9 +78,7 @@ class DeleteQuery(Query): class UpdateQuery(Query): - """ - Represents an "update" SQL query. - """ + """An UPDATE SQL query.""" compiler = 'SQLUpdateCompiler' @@ -93,7 +88,7 @@ class UpdateQuery(Query): def _setup_query(self): """ - Runs on initialization and after cloning. Any attributes that would + Run on initialization and after cloning. Any attributes that would normally be set in __init__ should go in here, instead, so that they are also set up after a clone() call. """ @@ -148,15 +143,15 @@ class UpdateQuery(Query): def add_related_update(self, model, field, value): """ - Adds (name, value) to an update query for an ancestor model. + Add (name, value) to an update query for an ancestor model. - Updates are coalesced so that we only run one update query per ancestor. + Update are coalesced so that only one update query per ancestor is run. """ self.related_updates.setdefault(model, []).append((field, None, value)) def get_related_updates(self): """ - Returns a list of query objects: one for each update required to an + Return a list of query objects: one for each update required to an ancestor model. Each query will have the same filtering conditions as the current query but will only update a single table. 
""" @@ -181,15 +176,6 @@ class InsertQuery(Query): self.objs = [] def insert_values(self, fields, objs, raw=False): - """ - Set up the insert query from the 'insert_values' dictionary. The - dictionary gives the model field names and their target values. - - If 'raw_values' is True, the values in the 'insert_values' dictionary - are inserted directly into the query, rather than passed as SQL - parameters. This provides a way to insert NULL and DEFAULT keywords - into the query, for example. - """ self.fields = fields self.objs = objs self.raw = raw @@ -197,8 +183,8 @@ class InsertQuery(Query): class AggregateQuery(Query): """ - An AggregateQuery takes another query as a parameter to the FROM - clause and only selects the elements in the provided list. + Take another query as a parameter to the FROM clause and only select the + elements in the provided list. """ compiler = 'SQLAggregateCompiler' diff --git a/django/db/models/sql/where.py b/django/db/models/sql/where.py index 7ce7617bfa..ed24b08bd0 100644 --- a/django/db/models/sql/where.py +++ b/django/db/models/sql/where.py @@ -13,7 +13,7 @@ OR = 'OR' class WhereNode(tree.Node): """ - Used to represent the SQL where-clause. + An SQL WHERE clause. The class is tied to the Query class that created it (in order to create the correct SQL). @@ -29,7 +29,7 @@ class WhereNode(tree.Node): def split_having(self, negated=False): """ - Returns two possibly None nodes: one for those parts of self that + Return two possibly None nodes: one for those parts of self that should be included in the WHERE clause and one for those parts of self that must be included in the HAVING clause. """ @@ -62,9 +62,9 @@ class WhereNode(tree.Node): def as_sql(self, compiler, connection): """ - Returns the SQL version of the where clause and the value to be - substituted in. 
Returns '', [] if this node matches everything, - None, [] if this node is empty, and raises EmptyResultSet if this + Return the SQL version of the where clause and the value to be + substituted in. Return '', [] if this node matches everything, + None, [] if this node is empty, and raise EmptyResultSet if this node can't match anything. """ result = [] @@ -127,7 +127,7 @@ class WhereNode(tree.Node): def relabel_aliases(self, change_map): """ - Relabels the alias values of any children. 'change_map' is a dictionary + Relabel the alias values of any children. 'change_map' is a dictionary mapping old (current) alias values to the new values. """ for pos, child in enumerate(self.children): @@ -139,7 +139,7 @@ class WhereNode(tree.Node): def clone(self): """ - Creates a clone of the tree. Must only be called on root nodes (nodes + Create a clone of the tree. Must only be called on root nodes (nodes with empty subtree_parents). Childs must be either (Contraint, lookup, value) tuples, or objects supporting .clone(). """ @@ -173,9 +173,7 @@ class WhereNode(tree.Node): class NothingNode: - """ - A node that matches nothing. - """ + """A node that matches nothing.""" contains_aggregate = False def as_sql(self, compiler=None, connection=None): diff --git a/django/db/models/utils.py b/django/db/models/utils.py index 7d563c4ae0..b966029d63 100644 --- a/django/db/models/utils.py +++ b/django/db/models/utils.py @@ -1,8 +1,8 @@ def make_model_tuple(model): """ - Takes a model or a string of the form "app_label.ModelName" and returns a + Take a model or a string of the form "app_label.ModelName" and return a corresponding ("app_label", "modelname") tuple. If a tuple is passed in, - it's assumed to be a valid model tuple already and returned unchanged. + assume it's a valid model tuple already and return it unchanged. """ try: if isinstance(model, tuple): |
