Coverage for /Users/davegaeddert/Developer/dropseed/plain/plain-models/plain/models/base.py: 14%

983 statements  

coverage.py v7.6.9, created at 2024-12-23 11:16 -0600

import copy
import inspect
import warnings
from functools import partialmethod
from itertools import chain

import plain.runtime
from plain import preflight
from plain.exceptions import (
    NON_FIELD_ERRORS,
    FieldDoesNotExist,
    FieldError,
    MultipleObjectsReturned,
    ObjectDoesNotExist,
    ValidationError,
)
from plain.models import transaction
from plain.models.aggregates import Max
from plain.models.constants import LOOKUP_SEP
from plain.models.constraints import CheckConstraint, UniqueConstraint
from plain.models.db import (
    PLAIN_VERSION_PICKLE_KEY,
    DatabaseError,
    connection,
    connections,
    router,
)
from plain.models.deletion import CASCADE, Collector
from plain.models.expressions import ExpressionWrapper, RawSQL, Value
from plain.models.fields import NOT_PROVIDED, IntegerField
from plain.models.fields.related import (
    ForeignObjectRel,
    OneToOneField,
    lazy_related_operation,
    resolve_relation,
)
from plain.models.functions import Coalesce
from plain.models.manager import Manager
from plain.models.options import Options
from plain.models.query import F, Q
from plain.models.signals import (
    class_prepared,
    post_init,
    post_save,
    pre_init,
    pre_save,
)
from plain.models.utils import AltersData, make_model_tuple
from plain.packages import packages
from plain.utils.encoding import force_str
from plain.utils.hashable import make_hashable


class Deferred:
    def __repr__(self):
        return "<Deferred field>"

    def __str__(self):
        return "<Deferred field>"


DEFERRED = Deferred()


def subclass_exception(name, bases, module, attached_to):
    """
    Create exception subclass. Used by ModelBase below.

    The exception is created in a way that allows it to be pickled, assuming
    that the returned exception class will be added as an attribute to the
    'attached_to' class.
    """
    return type(
        name,
        bases,
        {
            "__module__": module,
            "__qualname__": f"{attached_to.__qualname__}.{name}",
        },
    )


def _has_contribute_to_class(value):
    # Only call contribute_to_class() if it's bound.
    return not inspect.isclass(value) and hasattr(value, "contribute_to_class")


class ModelBase(type):
    """Metaclass for all models."""

    def __new__(cls, name, bases, attrs, **kwargs):
        super_new = super().__new__

        # Also ensure initialization is only performed for subclasses of Model
        # (excluding Model class itself).
        parents = [b for b in bases if isinstance(b, ModelBase)]
        if not parents:
            return super_new(cls, name, bases, attrs)

        # Create the class.
        module = attrs.pop("__module__")
        new_attrs = {"__module__": module}
        classcell = attrs.pop("__classcell__", None)
        if classcell is not None:
            new_attrs["__classcell__"] = classcell
        attr_meta = attrs.pop("Meta", None)
        # Pass all attrs without a (Plain-specific) contribute_to_class()
        # method to type.__new__() so that they're properly initialized
        # (i.e. __set_name__()).
        contributable_attrs = {}
        for obj_name, obj in attrs.items():
            if _has_contribute_to_class(obj):
                contributable_attrs[obj_name] = obj
            else:
                new_attrs[obj_name] = obj
        new_class = super_new(cls, name, bases, new_attrs, **kwargs)

        abstract = getattr(attr_meta, "abstract", False)
        meta = attr_meta or getattr(new_class, "Meta", None)
        base_meta = getattr(new_class, "_meta", None)

        package_label = None

        # Look for an application configuration to attach the model to.
        package_config = packages.get_containing_package_config(module)

        if getattr(meta, "package_label", None) is None:
            if package_config is None:
                if not abstract:
                    raise RuntimeError(
                        f"Model class {module}.{name} doesn't declare an explicit "
                        "package_label and isn't in an application in "
                        "INSTALLED_PACKAGES."
                    )

            else:
                package_label = package_config.label

        new_class.add_to_class("_meta", Options(meta, package_label))
        if not abstract:
            new_class.add_to_class(
                "DoesNotExist",
                subclass_exception(
                    "DoesNotExist",
                    tuple(
                        x.DoesNotExist
                        for x in parents
                        if hasattr(x, "_meta") and not x._meta.abstract
                    )
                    or (ObjectDoesNotExist,),
                    module,
                    attached_to=new_class,
                ),
            )
            new_class.add_to_class(
                "MultipleObjectsReturned",
                subclass_exception(
                    "MultipleObjectsReturned",
                    tuple(
                        x.MultipleObjectsReturned
                        for x in parents
                        if hasattr(x, "_meta") and not x._meta.abstract
                    )
                    or (MultipleObjectsReturned,),
                    module,
                    attached_to=new_class,
                ),
            )
            if base_meta and not base_meta.abstract:
                # Non-abstract child classes inherit some attributes from their
                # non-abstract parent (unless an ABC comes before it in the
                # method resolution order).
                if not hasattr(meta, "ordering"):
                    new_class._meta.ordering = base_meta.ordering
                if not hasattr(meta, "get_latest_by"):
                    new_class._meta.get_latest_by = base_meta.get_latest_by

        # Add remaining attributes (those with a contribute_to_class() method)
        # to the class.
        for obj_name, obj in contributable_attrs.items():
            new_class.add_to_class(obj_name, obj)

        # All the fields of any type declared on this model
        new_fields = chain(
            new_class._meta.local_fields,
            new_class._meta.local_many_to_many,
            new_class._meta.private_fields,
        )
        field_names = {f.name for f in new_fields}

        new_class._meta.concrete_model = new_class

        # Collect the parent links for multi-table inheritance.
        parent_links = {}
        for base in reversed([new_class] + parents):
            # Conceptually equivalent to `if base is Model`.
            if not hasattr(base, "_meta"):
                continue
            # Skip concrete parent classes.
            if base != new_class and not base._meta.abstract:
                continue
            # Locate OneToOneField instances.
            for field in base._meta.local_fields:
                if isinstance(field, OneToOneField) and field.remote_field.parent_link:
                    related = resolve_relation(new_class, field.remote_field.model)
                    parent_links[make_model_tuple(related)] = field

        # Track fields inherited from base models.
        inherited_attributes = set()
        # Do the appropriate setup for any model parents.
        for base in new_class.mro():
            if base not in parents or not hasattr(base, "_meta"):
                # Things without _meta aren't functional models, so they're
                # uninteresting parents.
                inherited_attributes.update(base.__dict__)
                continue

            parent_fields = base._meta.local_fields + base._meta.local_many_to_many
            if not base._meta.abstract:
                # Check for clashes between locally declared fields and those
                # on the base classes.
                for field in parent_fields:
                    if field.name in field_names:
                        raise FieldError(
                            f"Local field {field.name!r} in class {name!r} clashes with field of "
                            f"the same name from base class {base.__name__!r}."
                        )
                    else:
                        inherited_attributes.add(field.name)

                # Concrete classes...
                base = base._meta.concrete_model
                base_key = make_model_tuple(base)
                if base_key in parent_links:
                    field = parent_links[base_key]
                else:
                    attr_name = f"{base._meta.model_name}_ptr"
                    field = OneToOneField(
                        base,
                        on_delete=CASCADE,
                        name=attr_name,
                        auto_created=True,
                        parent_link=True,
                    )

                    if attr_name in field_names:
                        raise FieldError(
                            f"Auto-generated field '{attr_name}' in class {name!r} for "
                            f"parent_link to base class {base.__name__!r} clashes with "
                            "declared field of the same name."
                        )

                    # Only add the ptr field if it's not already present;
                    # e.g. migrations will already have it specified
                    if not hasattr(new_class, attr_name):
                        new_class.add_to_class(attr_name, field)
                new_class._meta.parents[base] = field
            else:
                base_parents = base._meta.parents.copy()

                # Add fields from abstract base class if it wasn't overridden.
                for field in parent_fields:
                    if (
                        field.name not in field_names
                        and field.name not in new_class.__dict__
                        and field.name not in inherited_attributes
                    ):
                        new_field = copy.deepcopy(field)
                        new_class.add_to_class(field.name, new_field)
                        # Replace parent links defined on this base by the new
                        # field. It will be appropriately resolved if required.
                        if field.one_to_one:
                            for parent, parent_link in base_parents.items():
                                if field == parent_link:
                                    base_parents[parent] = new_field

                # Pass any non-abstract parent classes onto child.
                new_class._meta.parents.update(base_parents)

            # Inherit private fields (like GenericForeignKey) from the parent
            # class
            for field in base._meta.private_fields:
                if field.name in field_names:
                    if not base._meta.abstract:
                        raise FieldError(
                            f"Local field {field.name!r} in class {name!r} clashes with field of "
                            f"the same name from base class {base.__name__!r}."
                        )
                else:
                    field = copy.deepcopy(field)
                    if not base._meta.abstract:
                        field.mti_inherited = True
                    new_class.add_to_class(field.name, field)

        # Copy indexes so that index names are unique when models extend an
        # abstract model.
        new_class._meta.indexes = [
            copy.deepcopy(idx) for idx in new_class._meta.indexes
        ]

        if abstract:
            # Abstract base models can't be instantiated and don't appear in
            # the list of models for an app. We do the final setup for them a
            # little differently from normal models.
            attr_meta.abstract = False
            new_class.Meta = attr_meta
            return new_class

        new_class._prepare()
        new_class._meta.packages.register_model(
            new_class._meta.package_label, new_class
        )
        return new_class

    def add_to_class(cls, name, value):
        if _has_contribute_to_class(value):
            value.contribute_to_class(cls, name)
        else:
            setattr(cls, name, value)

    def _prepare(cls):
        """Create some methods once self._meta has been populated."""
        opts = cls._meta
        opts._prepare(cls)

        if opts.order_with_respect_to:
            cls.get_next_in_order = partialmethod(
                cls._get_next_or_previous_in_order, is_next=True
            )
            cls.get_previous_in_order = partialmethod(
                cls._get_next_or_previous_in_order, is_next=False
            )

            # Defer creating accessors on the foreign class until it has been
            # created and registered. If remote_field is None, we're ordering
            # with respect to a GenericForeignKey and don't know what the
            # foreign class is - we'll add those accessors later in
            # contribute_to_class().
            if opts.order_with_respect_to.remote_field:
                wrt = opts.order_with_respect_to
                remote = wrt.remote_field.model
                lazy_related_operation(make_foreign_order_accessors, cls, remote)

        # Give the class a docstring -- its definition.
        if cls.__doc__ is None:
            cls.__doc__ = "{}({})".format(
                cls.__name__,
                ", ".join(f.name for f in opts.fields),
            )

        if not opts.managers:
            if any(f.name == "objects" for f in opts.fields):
                raise ValueError(
                    f"Model {cls.__name__} must specify a custom Manager, because it has a "
                    "field named 'objects'."
                )
            manager = Manager()
            manager.auto_created = True
            cls.add_to_class("objects", manager)

        # Set the name of _meta.indexes. This can't be done in
        # Options.contribute_to_class() because fields haven't been added to
        # the model at that point.
        for index in cls._meta.indexes:
            if not index.name:
                index.set_name_with_model(cls)

        class_prepared.send(sender=cls)

    @property
    def _base_manager(cls):
        return cls._meta.base_manager

    @property
    def _default_manager(cls):
        return cls._meta.default_manager


class ModelStateFieldsCacheDescriptor:
    def __get__(self, instance, cls=None):
        if instance is None:
            return self
        res = instance.fields_cache = {}
        return res


class ModelState:
    """Store model instance state."""

    db = None
    # If true, uniqueness validation checks will consider this a new, unsaved
    # object. Necessary for correct validation of new instances of objects with
    # explicit (non-auto) PKs. This impacts validation only; it has no effect
    # on the actual save.
    adding = True
    fields_cache = ModelStateFieldsCacheDescriptor()


class Model(AltersData, metaclass=ModelBase):
    def __init__(self, *args, **kwargs):
        # Alias some things as locals to avoid repeat global lookups
        cls = self.__class__
        opts = self._meta
        _setattr = setattr
        _DEFERRED = DEFERRED
        if opts.abstract:
            raise TypeError("Abstract models cannot be instantiated.")

        pre_init.send(sender=cls, args=args, kwargs=kwargs)

        # Set up the storage for instance state
        self._state = ModelState()

        # There is a rather weird disparity here; if kwargs, it's set, then args
        # overrides it. It should be one or the other; don't duplicate the work
        # The reason for the kwargs check is that standard iterator passes in by
        # args, and instantiation for iteration is 33% faster.
        if len(args) > len(opts.concrete_fields):
            # Daft, but matches old exception sans the err msg.
            raise IndexError("Number of args exceeds number of fields")

        if not kwargs:
            fields_iter = iter(opts.concrete_fields)
            # The ordering of the zip calls matter - zip throws StopIteration
            # when an iter throws it. So if the first iter throws it, the second
            # is *not* consumed. We rely on this, so don't change the order
            # without changing the logic.
            for val, field in zip(args, fields_iter):
                if val is _DEFERRED:
                    continue
                _setattr(self, field.attname, val)
        else:
            # Slower, kwargs-ready version.
            fields_iter = iter(opts.fields)
            for val, field in zip(args, fields_iter):
                if val is _DEFERRED:
                    continue
                _setattr(self, field.attname, val)
                if kwargs.pop(field.name, NOT_PROVIDED) is not NOT_PROVIDED:
                    raise TypeError(
                        f"{cls.__qualname__}() got both positional and "
                        f"keyword arguments for field '{field.name}'."
                    )

        # Now we're left with the unprocessed fields that *must* come from
        # keywords, or default.

        for field in fields_iter:
            is_related_object = False
            # Virtual field
            if field.attname not in kwargs and field.column is None:
                continue
            if kwargs:
                if isinstance(field.remote_field, ForeignObjectRel):
                    try:
                        # Assume object instance was passed in.
                        rel_obj = kwargs.pop(field.name)
                        is_related_object = True
                    except KeyError:
                        try:
                            # Object instance wasn't passed in -- must be an ID.
                            val = kwargs.pop(field.attname)
                        except KeyError:
                            val = field.get_default()
                else:
                    try:
                        val = kwargs.pop(field.attname)
                    except KeyError:
                        # This is done with an exception rather than the
                        # default argument on pop because we don't want
                        # get_default() to be evaluated, and then not used.
                        # Refs #12057.
                        val = field.get_default()
            else:
                val = field.get_default()

            if is_related_object:
                # If we are passed a related instance, set it using the
                # field.name instead of field.attname (e.g. "user" instead of
                # "user_id") so that the object gets properly cached (and type
                # checked) by the RelatedObjectDescriptor.
                if rel_obj is not _DEFERRED:
                    _setattr(self, field.name, rel_obj)
            else:
                if val is not _DEFERRED:
                    _setattr(self, field.attname, val)

        if kwargs:
            property_names = opts._property_names
            unexpected = ()
            for prop, value in kwargs.items():
                # Any remaining kwargs must correspond to properties or virtual
                # fields.
                if prop in property_names:
                    if value is not _DEFERRED:
                        _setattr(self, prop, value)
                else:
                    try:
                        opts.get_field(prop)
                    except FieldDoesNotExist:
                        unexpected += (prop,)
                    else:
                        if value is not _DEFERRED:
                            _setattr(self, prop, value)
            if unexpected:
                unexpected_names = ", ".join(repr(n) for n in unexpected)
                raise TypeError(
                    f"{cls.__name__}() got unexpected keyword arguments: "
                    f"{unexpected_names}"
                )
        super().__init__()
        post_init.send(sender=cls, instance=self)

    @classmethod
    def from_db(cls, db, field_names, values):
        if len(values) != len(cls._meta.concrete_fields):
            values_iter = iter(values)
            values = [
                next(values_iter) if f.attname in field_names else DEFERRED
                for f in cls._meta.concrete_fields
            ]
        new = cls(*values)
        new._state.adding = False
        new._state.db = db
        return new

    def __repr__(self):
        return f"<{self.__class__.__name__}: {self}>"

    def __str__(self):
        return f"{self.__class__.__name__} object ({self.pk})"

    def __eq__(self, other):
        if not isinstance(other, Model):
            return NotImplemented
        if self._meta.concrete_model != other._meta.concrete_model:
            return False
        my_pk = self.pk
        if my_pk is None:
            return self is other
        return my_pk == other.pk

    def __hash__(self):
        if self.pk is None:
            raise TypeError("Model instances without primary key value are unhashable")
        return hash(self.pk)

    def __reduce__(self):
        data = self.__getstate__()
        data[PLAIN_VERSION_PICKLE_KEY] = plain.runtime.__version__
        class_id = self._meta.package_label, self._meta.object_name
        return model_unpickle, (class_id,), data

    def __getstate__(self):
        """Hook to allow choosing the attributes to pickle."""
        state = self.__dict__.copy()
        state["_state"] = copy.copy(state["_state"])
        state["_state"].fields_cache = state["_state"].fields_cache.copy()
        # memoryview cannot be pickled, so cast it to bytes and store
        # separately.
        _memoryview_attrs = []
        for attr, value in state.items():
            if isinstance(value, memoryview):
                _memoryview_attrs.append((attr, bytes(value)))
        if _memoryview_attrs:
            state["_memoryview_attrs"] = _memoryview_attrs
            for attr, value in _memoryview_attrs:
                state.pop(attr)
        return state

    def __setstate__(self, state):
        pickled_version = state.get(PLAIN_VERSION_PICKLE_KEY)
        if pickled_version:
            if pickled_version != plain.runtime.__version__:
                warnings.warn(
                    f"Pickled model instance's Plain version {pickled_version} does not "
                    f"match the current version {plain.runtime.__version__}.",
                    RuntimeWarning,
                    stacklevel=2,
                )
        else:
            warnings.warn(
                "Pickled model instance's Plain version is not specified.",
                RuntimeWarning,
                stacklevel=2,
            )
        if "_memoryview_attrs" in state:
            for attr, value in state.pop("_memoryview_attrs"):
                state[attr] = memoryview(value)
        self.__dict__.update(state)
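
    # A minimal pickling sketch (assuming a hypothetical saved "article"
    # instance of a concrete model): __reduce__/__getstate__/__setstate__ make
    # instances round-trippable with pickle, and a Plain version mismatch only
    # emits a RuntimeWarning.
    #
    #     import pickle
    #     restored = pickle.loads(pickle.dumps(article))
    #     assert restored.pk == article.pk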

    def _get_pk_val(self, meta=None):
        meta = meta or self._meta
        return getattr(self, meta.pk.attname)

    def _set_pk_val(self, value):
        for parent_link in self._meta.parents.values():
            if parent_link and parent_link != self._meta.pk:
                setattr(self, parent_link.target_field.attname, value)
        return setattr(self, self._meta.pk.attname, value)

    pk = property(_get_pk_val, _set_pk_val)

    def get_deferred_fields(self):
        """
        Return a set containing names of deferred fields for this instance.
        """
        return {
            f.attname
            for f in self._meta.concrete_fields
            if f.attname not in self.__dict__
        }

    def refresh_from_db(self, using=None, fields=None):
        """
        Reload field values from the database.

        By default, the reloading happens from the database this instance was
        loaded from, or by the read router if this instance wasn't loaded from
        any database. The using parameter will override the default.

        Fields can be used to specify which fields to reload. The fields
        should be an iterable of field attnames. If fields is None, then
        all non-deferred fields are reloaded.

        When accessing deferred fields of an instance, the deferred loading
        of the field will call this method.
        """
        if fields is None:
            self._prefetched_objects_cache = {}
        else:
            prefetched_objects_cache = getattr(self, "_prefetched_objects_cache", ())
            for field in fields:
                if field in prefetched_objects_cache:
                    del prefetched_objects_cache[field]
                    fields.remove(field)
            if not fields:
                return
            if any(LOOKUP_SEP in f for f in fields):
                raise ValueError(
                    f'Found "{LOOKUP_SEP}" in fields argument. Relations and transforms '
                    "are not allowed in fields."
                )

        hints = {"instance": self}
        db_instance_qs = self.__class__._base_manager.db_manager(
            using, hints=hints
        ).filter(pk=self.pk)

        # Use provided fields, if not set then reload all non-deferred fields.
        deferred_fields = self.get_deferred_fields()
        if fields is not None:
            fields = list(fields)
            db_instance_qs = db_instance_qs.only(*fields)
        elif deferred_fields:
            fields = [
                f.attname
                for f in self._meta.concrete_fields
                if f.attname not in deferred_fields
            ]
            db_instance_qs = db_instance_qs.only(*fields)

        db_instance = db_instance_qs.get()
        non_loaded_fields = db_instance.get_deferred_fields()
        for field in self._meta.concrete_fields:
            if field.attname in non_loaded_fields:
                # This field wasn't refreshed - skip ahead.
                continue
            setattr(self, field.attname, getattr(db_instance, field.attname))
            # Clear cached foreign keys.
            if field.is_relation and field.is_cached(self):
                field.delete_cached_value(self)

        # Clear cached relations.
        for field in self._meta.related_objects:
            if field.is_cached(self):
                field.delete_cached_value(self)

        # Clear cached private relations.
        for field in self._meta.private_fields:
            if field.is_relation and field.is_cached(self):
                field.delete_cached_value(self)

        self._state.db = db_instance._state.db
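
    # A minimal usage sketch (assuming a hypothetical saved "article"
    # instance): reload every non-deferred field, or limit the query to
    # specific attnames.
    #
    #     article.refresh_from_db()                  # all non-deferred fields
    #     article.refresh_from_db(fields=["title"])  # only the "title" column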

    def serializable_value(self, field_name):
        """
        Return the value of the field name for this instance. If the field is
        a foreign key, return the id value instead of the object. If there's
        no Field object with this name on the model, return the model
        attribute's value.

        Used to serialize a field's value (in the serializer, or form output,
        for example). Normally, you would just access the attribute directly
        and not use this method.
        """
        try:
            field = self._meta.get_field(field_name)
        except FieldDoesNotExist:
            return getattr(self, field_name)
        return getattr(self, field.attname)

    def save(
        self,
        *,
        clean_and_validate=True,
        force_insert=False,
        force_update=False,
        using=None,
        update_fields=None,
    ):
        """
        Save the current instance. Override this in a subclass if you want to
        control the saving process.

        The 'force_insert' and 'force_update' parameters can be used to insist
        that the "save" must be an SQL insert or update (or equivalent for
        non-SQL backends), respectively. Normally, they should not be set.
        """
        self._prepare_related_fields_for_save(operation_name="save")

        using = using or router.db_for_write(self.__class__, instance=self)
        if force_insert and (force_update or update_fields):
            raise ValueError("Cannot force both insert and updating in model saving.")

        deferred_fields = self.get_deferred_fields()
        if update_fields is not None:
            # If update_fields is empty, skip the save. We do also check for
            # no-op saves later on for inheritance cases. This bailout is
            # still needed for skipping signal sending.
            if not update_fields:
                return

            update_fields = frozenset(update_fields)
            field_names = self._meta._non_pk_concrete_field_names
            non_model_fields = update_fields.difference(field_names)

            if non_model_fields:
                raise ValueError(
                    "The following fields do not exist in this model, are m2m "
                    "fields, or are non-concrete fields: {}".format(
                        ", ".join(non_model_fields)
                    )
                )

        # If saving to the same database, and this model is deferred, then
        # automatically do an "update_fields" save on the loaded fields.
        elif not force_insert and deferred_fields and using == self._state.db:
            field_names = set()
            for field in self._meta.concrete_fields:
                if not field.primary_key and not hasattr(field, "through"):
                    field_names.add(field.attname)
            loaded_fields = field_names.difference(deferred_fields)
            if loaded_fields:
                update_fields = frozenset(loaded_fields)

        if clean_and_validate:
            self.full_clean(exclude=deferred_fields)

        self.save_base(
            using=using,
            force_insert=force_insert,
            force_update=force_update,
            update_fields=update_fields,
        )

    save.alters_data = True
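
    # A minimal usage sketch (assuming a hypothetical "Article" model with a
    # "title" field): save() runs full_clean() by default, and update_fields
    # restricts the UPDATE to the named columns.
    #
    #     article = Article(title="Hello")
    #     article.save()                         # validates, then inserts
    #     article.title = "Updated"
    #     article.save(update_fields=["title"])  # UPDATE only the "title" column
    #     article.save(clean_and_validate=False) # skip validation entirely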

    def save_base(
        self,
        *,
        raw=False,
        force_insert=False,
        force_update=False,
        using=None,
        update_fields=None,
    ):
        """
        Handle the parts of saving which should be done only once per save,
        yet need to be done in raw saves, too. This includes some sanity
        checks and signal sending.

        The 'raw' argument is telling save_base not to save any parent
        models and not to do any changes to the values before save. This
        is used by fixture loading.
        """
        using = using or router.db_for_write(self.__class__, instance=self)
        assert not (force_insert and (force_update or update_fields))
        assert update_fields is None or update_fields
        cls = origin = self.__class__
        meta = cls._meta
        if not meta.auto_created:
            pre_save.send(
                sender=origin,
                instance=self,
                raw=raw,
                using=using,
                update_fields=update_fields,
            )
        # A transaction isn't needed if one query is issued.
        if meta.parents:
            context_manager = transaction.atomic(using=using, savepoint=False)
        else:
            context_manager = transaction.mark_for_rollback_on_error(using=using)
        with context_manager:
            parent_inserted = False
            if not raw:
                parent_inserted = self._save_parents(cls, using, update_fields)
            updated = self._save_table(
                raw,
                cls,
                force_insert or parent_inserted,
                force_update,
                using,
                update_fields,
            )
        # Store the database on which the object was saved
        self._state.db = using
        # Once saved, this is no longer a to-be-added instance.
        self._state.adding = False

        # Signal that the save is complete
        if not meta.auto_created:
            post_save.send(
                sender=origin,
                instance=self,
                created=(not updated),
                update_fields=update_fields,
                raw=raw,
                using=using,
            )

    save_base.alters_data = True

    def _save_parents(self, cls, using, update_fields):
        """Save all the parents of cls using values from self."""
        meta = cls._meta
        inserted = False
        for parent, field in meta.parents.items():
            # Make sure the link fields are synced between parent and self.
            if (
                field
                and getattr(self, parent._meta.pk.attname) is None
                and getattr(self, field.attname) is not None
            ):
                setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
            parent_inserted = self._save_parents(
                cls=parent, using=using, update_fields=update_fields
            )
            updated = self._save_table(
                cls=parent,
                using=using,
                update_fields=update_fields,
                force_insert=parent_inserted,
            )
            if not updated:
                inserted = True
            # Set the parent's PK value to self.
            if field:
                setattr(self, field.attname, self._get_pk_val(parent._meta))
                # Since we didn't have an instance of the parent handy set
                # attname directly, bypassing the descriptor. Invalidate
                # the related object cache, in case it's been accidentally
                # populated. A fresh instance will be re-built from the
                # database if necessary.
                if field.is_cached(self):
                    field.delete_cached_value(self)
        return inserted

    def _save_table(
        self,
        raw=False,
        cls=None,
        force_insert=False,
        force_update=False,
        using=None,
        update_fields=None,
    ):
        """
        Do the heavy-lifting involved in saving. Update or insert the data
        for a single table.
        """
        meta = cls._meta
        non_pks = [f for f in meta.local_concrete_fields if not f.primary_key]

        if update_fields:
            non_pks = [
                f
                for f in non_pks
                if f.name in update_fields or f.attname in update_fields
            ]

        pk_val = self._get_pk_val(meta)
        if pk_val is None:
            pk_val = meta.pk.get_pk_value_on_save(self)
            setattr(self, meta.pk.attname, pk_val)
        pk_set = pk_val is not None
        if not pk_set and (force_update or update_fields):
            raise ValueError("Cannot force an update in save() with no primary key.")
        updated = False
        # Skip an UPDATE when adding an instance and primary key has a default.
        if (
            not raw
            and not force_insert
            and self._state.adding
            and meta.pk.default
            and meta.pk.default is not NOT_PROVIDED
        ):
            force_insert = True
        # If possible, try an UPDATE. If that doesn't update anything, do an INSERT.
        if pk_set and not force_insert:
            base_qs = cls._base_manager.using(using)
            values = [
                (
                    f,
                    None,
                    (getattr(self, f.attname) if raw else f.pre_save(self, False)),
                )
                for f in non_pks
            ]
            forced_update = update_fields or force_update
            updated = self._do_update(
                base_qs, using, pk_val, values, update_fields, forced_update
            )
            if force_update and not updated:
                raise DatabaseError("Forced update did not affect any rows.")
            if update_fields and not updated:
                raise DatabaseError("Save with update_fields did not affect any rows.")
        if not updated:
            if meta.order_with_respect_to:
                # If this is a model with an order_with_respect_to
                # autopopulate the _order field
                field = meta.order_with_respect_to
                filter_args = field.get_filter_kwargs_for_object(self)
                self._order = (
                    cls._base_manager.using(using)
                    .filter(**filter_args)
                    .aggregate(
                        _order__max=Coalesce(
                            ExpressionWrapper(
                                Max("_order") + Value(1), output_field=IntegerField()
                            ),
                            Value(0),
                        ),
                    )["_order__max"]
                )
            fields = meta.local_concrete_fields
            if not pk_set:
                fields = [f for f in fields if f is not meta.auto_field]

            returning_fields = meta.db_returning_fields
            results = self._do_insert(
                cls._base_manager, using, fields, returning_fields, raw
            )
            if results:
                for value, field in zip(results[0], returning_fields):
                    setattr(self, field.attname, value)
        return updated

    def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update):
        """
        Try to update the model. Return True if the model was updated (if an
        update query was done and a matching row was found in the DB).
        """
        filtered = base_qs.filter(pk=pk_val)
        if not values:
            # We can end up here when saving a model in inheritance chain where
            # update_fields doesn't target any field in current model. In that
            # case we just say the update succeeded. Another case ending up here
            # is a model with just PK - in that case check that the PK still
            # exists.
            return update_fields is not None or filtered.exists()
        if self._meta.select_on_save and not forced_update:
            return (
                filtered.exists()
                and
                # It may happen that the object is deleted from the DB right after
                # this check, causing the subsequent UPDATE to return zero matching
                # rows. The same result can occur in some rare cases when the
                # database returns zero despite the UPDATE being executed
                # successfully (a row is matched and updated). In order to
                # distinguish these two cases, the object's existence in the
                # database is again checked for if the UPDATE query returns 0.
                (filtered._update(values) > 0 or filtered.exists())
            )
        return filtered._update(values) > 0

    def _do_insert(self, manager, using, fields, returning_fields, raw):
        """
        Do an INSERT. If returning_fields is defined then this method should
        return the newly created data for the model.
        """
        return manager._insert(
            [self],
            fields=fields,
            returning_fields=returning_fields,
            using=using,
            raw=raw,
        )

    def _prepare_related_fields_for_save(self, operation_name, fields=None):
        # Ensure that a model instance without a PK hasn't been assigned to
        # a ForeignKey, GenericForeignKey or OneToOneField on this model. If
        # the field is nullable, allowing the save would result in silent data
        # loss.
        for field in self._meta.concrete_fields:
            if fields and field not in fields:
                continue
            # If the related field isn't cached, then an instance hasn't been
            # assigned and there's no need to worry about this check.
            if field.is_relation and field.is_cached(self):
                obj = getattr(self, field.name, None)
                if not obj:
                    continue
                # A pk may have been assigned manually to a model instance not
                # saved to the database (or auto-generated in a case like
                # UUIDField), but we allow the save to proceed and rely on the
                # database to raise an IntegrityError if applicable. If
                # constraints aren't supported by the database, there's the
                # unavoidable risk of data corruption.
                if obj.pk is None:
                    # Remove the object from a related instance cache.
                    if not field.remote_field.multiple:
                        field.remote_field.delete_cached_value(obj)
                    raise ValueError(
                        f"{operation_name}() prohibited to prevent data loss due to unsaved "
                        f"related object '{field.name}'."
                    )
                elif getattr(self, field.attname) in field.empty_values:
                    # Set related object if it has been saved after an
                    # assignment.
                    setattr(self, field.name, obj)
                # If the relationship's pk/to_field was changed, clear the
                # cached relationship.
                if getattr(obj, field.target_field.attname) != getattr(
                    self, field.attname
                ):
                    field.delete_cached_value(self)
        # GenericForeignKeys are private.
        for field in self._meta.private_fields:
            if fields and field not in fields:
                continue
            if (
                field.is_relation
                and field.is_cached(self)
                and hasattr(field, "fk_field")
            ):
                obj = field.get_cached_value(self, default=None)
                if obj and obj.pk is None:
                    raise ValueError(
                        f"{operation_name}() prohibited to prevent data loss due to "
                        f"unsaved related object '{field.name}'."
                    )

    def delete(self, using=None, keep_parents=False):
        if self.pk is None:
            raise ValueError(
                f"{self._meta.object_name} object can't be deleted because its {self._meta.pk.attname} attribute is set "
                "to None."
            )
        using = using or router.db_for_write(self.__class__, instance=self)
        collector = Collector(using=using, origin=self)
        collector.collect([self], keep_parents=keep_parents)
        return collector.delete()

    delete.alters_data = True

    def _get_FIELD_display(self, field):
        value = getattr(self, field.attname)
        choices_dict = dict(make_hashable(field.flatchoices))
        # force_str() to coerce lazy strings.
        return force_str(
            choices_dict.get(make_hashable(value), value), strings_only=True
        )

    def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
        if not self.pk:
            raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
        op = "gt" if is_next else "lt"
        order = "" if is_next else "-"
        param = getattr(self, field.attname)
        q = Q.create([(field.name, param), (f"pk__{op}", self.pk)], connector=Q.AND)
        q = Q.create([q, (f"{field.name}__{op}", param)], connector=Q.OR)
        qs = (
            self.__class__._default_manager.using(self._state.db)
            .filter(**kwargs)
            .filter(q)
            .order_by(f"{order}{field.name}", f"{order}pk")
        )
        try:
            return qs[0]
        except IndexError:
            raise self.DoesNotExist(
                f"{self.__class__._meta.object_name} matching query does not exist."
            )

    def _get_next_or_previous_in_order(self, is_next):
        cachename = f"__{is_next}_order_cache"
        if not hasattr(self, cachename):
            op = "gt" if is_next else "lt"
            order = "_order" if is_next else "-_order"
            order_field = self._meta.order_with_respect_to
            filter_args = order_field.get_filter_kwargs_for_object(self)
            obj = (
                self.__class__._default_manager.filter(**filter_args)
                .filter(
                    **{
                        f"_order__{op}": self.__class__._default_manager.values(
                            "_order"
                        ).filter(**{self._meta.pk.name: self.pk})
                    }
                )
                .order_by(order)[:1]
                .get()
            )
            setattr(self, cachename, obj)
        return getattr(self, cachename)

    def _get_field_value_map(self, meta, exclude=None):
        if exclude is None:
            exclude = set()
        meta = meta or self._meta
        return {
            field.name: Value(getattr(self, field.attname), field)
            for field in meta.local_concrete_fields
            if field.name not in exclude
        }

    def prepare_database_save(self, field):
        if self.pk is None:
            raise ValueError(
                f"Unsaved model instance {self!r} cannot be used in an ORM query."
            )
        return getattr(self, field.remote_field.get_related_field().attname)

    def clean(self):
        """
        Hook for doing any extra model-wide validation after clean() has been
        called on every field by self.clean_fields. Any ValidationError raised
        by this method will not be associated with a particular field; it will
        have a special-case association with the field defined by NON_FIELD_ERRORS.
        """
        pass

    def validate_unique(self, exclude=None):
        """
        Check unique constraints on the model and raise ValidationError if any
        failed.
        """
        unique_checks = self._get_unique_checks(exclude=exclude)

        if errors := self._perform_unique_checks(unique_checks):
            raise ValidationError(errors)

    def _get_unique_checks(self, exclude=None):
        """
        Return a list of checks to perform. Since validate_unique() could be
        called from a ModelForm, some fields may have been excluded; we can't
        perform a unique check on a model that is missing fields involved
        in that check. Fields that did not validate should also be excluded,
        but they need to be passed in via the exclude argument.
        """
        if exclude is None:
            exclude = set()
        unique_checks = []

        # Gather a list of checks for fields declared as unique and add them to
        # the list of checks.

        fields_with_class = [(self.__class__, self._meta.local_fields)]
        for parent_class in self._meta.get_parent_list():
            fields_with_class.append((parent_class, parent_class._meta.local_fields))

        for model_class, fields in fields_with_class:
            for f in fields:
                name = f.name
                if name in exclude:
                    continue
                if f.unique:
                    unique_checks.append((model_class, (name,)))

        return unique_checks

    def _perform_unique_checks(self, unique_checks):
        errors = {}

        for model_class, unique_check in unique_checks:
            # Try to look up an existing object with the same values as this
            # object's values for all the unique field.

            lookup_kwargs = {}
            for field_name in unique_check:
                f = self._meta.get_field(field_name)
                lookup_value = getattr(self, f.attname)
                # TODO: Handle multiple backends with different feature flags.
                if lookup_value is None or (
                    lookup_value == ""
                    and connection.features.interprets_empty_strings_as_nulls
                ):
                    # no value, skip the lookup
                    continue
                if f.primary_key and not self._state.adding:
                    # no need to check for unique primary key when editing
                    continue
                lookup_kwargs[str(field_name)] = lookup_value

            # some fields were skipped, no reason to do the check
            if len(unique_check) != len(lookup_kwargs):
                continue

            qs = model_class._default_manager.filter(**lookup_kwargs)

            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            # Note that we need to use the pk as defined by model_class, not
            # self.pk. These can be different fields because model inheritance
            # allows single model to have effectively multiple primary keys.
            # Refs #17615.
            model_class_pk = self._get_pk_val(model_class._meta)
            if not self._state.adding and model_class_pk is not None:
                qs = qs.exclude(pk=model_class_pk)
            if qs.exists():
                if len(unique_check) == 1:
                    key = unique_check[0]
                else:
                    key = NON_FIELD_ERRORS
                errors.setdefault(key, []).append(
                    self.unique_error_message(model_class, unique_check)
                )

        return errors

    def unique_error_message(self, model_class, unique_check):
        opts = model_class._meta

        params = {
            "model": self,
            "model_class": model_class,
            "model_name": opts.model_name,
            "unique_check": unique_check,
        }

        if len(unique_check) == 1:
            field = opts.get_field(unique_check[0])
            params["field_label"] = field.name
            return ValidationError(
                message=field.error_messages["unique"],
                code="unique",
                params=params,
            )
        else:
            field_names = [opts.get_field(f).name for f in unique_check]

            # Put an "and" before the last one
            field_names[-1] = f"and {field_names[-1]}"

            if len(field_names) > 2:
                # Comma join if more than 2
                params["field_label"] = ", ".join(field_names)
            else:
                # Just a space if there are only 2
                params["field_label"] = " ".join(field_names)

            # Use the first field as the message format...
            message = opts.get_field(unique_check[0]).error_messages["unique"]

            return ValidationError(
                message=message,
                code="unique",
                params=params,
            )

    def get_constraints(self):
        constraints = [(self.__class__, self._meta.constraints)]
        for parent_class in self._meta.get_parent_list():
            if parent_class._meta.constraints:
                constraints.append((parent_class, parent_class._meta.constraints))
        return constraints

    def validate_constraints(self, exclude=None):
        constraints = self.get_constraints()
        using = router.db_for_write(self.__class__, instance=self)

        errors = {}
        for model_class, model_constraints in constraints:
            for constraint in model_constraints:
                try:
                    constraint.validate(model_class, self, exclude=exclude, using=using)
                except ValidationError as e:
                    if (
                        getattr(e, "code", None) == "unique"
                        and len(constraint.fields) == 1
                    ):
                        errors.setdefault(constraint.fields[0], []).append(e)
                    else:
                        errors = e.update_error_dict(errors)
        if errors:
            raise ValidationError(errors)

    def full_clean(
        self, *, exclude=None, validate_unique=True, validate_constraints=True
    ):
        """
        Call clean_fields(), clean(), validate_unique(), and
        validate_constraints() on the model. Raise a ValidationError for any
        errors that occur.
        """
        errors = {}
        if exclude is None:
            exclude = set()
        else:
            exclude = set(exclude)

        try:
            self.clean_fields(exclude=exclude)
        except ValidationError as e:
            errors = e.update_error_dict(errors)

        # Form.clean() is run even if other validation fails, so do the
        # same with Model.clean() for consistency.
        try:
            self.clean()
        except ValidationError as e:
            errors = e.update_error_dict(errors)

        # Run unique checks, but only for fields that passed validation.
        if validate_unique:
            for name in errors:
                if name != NON_FIELD_ERRORS and name not in exclude:
                    exclude.add(name)
            try:
                self.validate_unique(exclude=exclude)
            except ValidationError as e:
                errors = e.update_error_dict(errors)

        # Run constraints checks, but only for fields that passed validation.
        if validate_constraints:
            for name in errors:
                if name != NON_FIELD_ERRORS and name not in exclude:
                    exclude.add(name)
            try:
                self.validate_constraints(exclude=exclude)
            except ValidationError as e:
                errors = e.update_error_dict(errors)

        if errors:
            raise ValidationError(errors)
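
    # A minimal validation sketch (assuming a hypothetical "Article" model
    # with a required "title" field): full_clean() aggregates field, model,
    # uniqueness, and constraint errors into one ValidationError keyed by
    # field name.
    #
    #     try:
    #         Article(title="").full_clean()
    #     except ValidationError as e:
    #         print(e.message_dict)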

1348 

1349 def clean_fields(self, exclude=None): 

1350 """ 

1351 Clean all fields and raise a ValidationError containing a dict 

1352 of all validation errors if any occur. 

1353 """ 

1354 if exclude is None: 

1355 exclude = set() 

1356 

1357 errors = {} 

1358 for f in self._meta.fields: 

1359 if f.name in exclude: 

1360 continue 

1361 # Skip validation for empty fields with blank=True. The developer 

1362 # is responsible for making sure they have a valid value. 

1363 raw_value = getattr(self, f.attname) 

1364 if f.blank and raw_value in f.empty_values: 

1365 continue 

1366 try: 

1367 setattr(self, f.attname, f.clean(raw_value, self)) 

1368 except ValidationError as e: 

1369 errors[f.name] = e.error_list 

1370 

1371 if errors: 

1372 raise ValidationError(errors) 

1373 

1374 @classmethod 

1375 def check(cls, **kwargs): 

1376 errors = [ 

1377 *cls._check_swappable(), 

1378 *cls._check_managers(**kwargs), 

1379 ] 

1380 if not cls._meta.swapped: 

1381 databases = kwargs.get("databases") or [] 

1382 errors += [ 

1383 *cls._check_fields(**kwargs), 

1384 *cls._check_m2m_through_same_relationship(), 

1385 *cls._check_long_column_names(databases), 

1386 ] 

1387 clash_errors = ( 

1388 *cls._check_id_field(), 

1389 *cls._check_field_name_clashes(), 

1390 *cls._check_model_name_db_lookup_clashes(), 

1391 *cls._check_property_name_related_field_accessor_clashes(), 

1392 *cls._check_single_primary_key(), 

1393 ) 

1394 errors.extend(clash_errors) 

1395 # If there are field name clashes, hide consequent column name 

1396 # clashes. 

1397 if not clash_errors: 

1398 errors.extend(cls._check_column_name_clashes()) 

1399 errors += [ 

1400 *cls._check_indexes(databases), 

1401 *cls._check_ordering(), 

1402 *cls._check_constraints(databases), 

1403 *cls._check_db_table_comment(databases), 

1404 ] 

1405 

1406 return errors 

1407 

1408 @classmethod 

1409 def _check_db_table_comment(cls, databases): 

1410 if not cls._meta.db_table_comment: 

1411 return [] 

1412 errors = [] 

1413 for db in databases: 

1414 if not router.allow_migrate_model(db, cls): 

1415 continue 

1416 connection = connections[db] 

1417 if not ( 

1418 connection.features.supports_comments 

1419 or "supports_comments" in cls._meta.required_db_features 

1420 ): 

1421 errors.append( 

1422 preflight.Warning( 

1423 f"{connection.display_name} does not support comments on " 

1424 f"tables (db_table_comment).", 

1425 obj=cls, 

1426 id="models.W046", 

1427 ) 

1428 ) 

1429 return errors 

1430 

1431 @classmethod 

1432 def _check_swappable(cls): 

1433 """Check if the swapped model exists.""" 

1434 errors = [] 

1435 if cls._meta.swapped: 

1436 try: 

1437 packages.get_model(cls._meta.swapped) 

1438 except ValueError: 

1439 errors.append( 

1440 preflight.Error( 

1441 f"'{cls._meta.swappable}' is not of the form 'package_label.package_name'.", 

1442 id="models.E001", 

1443 ) 

1444 ) 

1445 except LookupError: 

1446 package_label, model_name = cls._meta.swapped.split(".") 

1447 errors.append( 

1448 preflight.Error( 

1449 f"'{cls._meta.swappable}' references '{package_label}.{model_name}', which has not been " 

1450 "installed, or is abstract.", 

1451 id="models.E002", 

1452 ) 

1453 ) 

1454 return errors 

1455 

1456 @classmethod 

1457 def _check_managers(cls, **kwargs): 

1458 """Perform all manager checks.""" 

1459 errors = [] 

1460 for manager in cls._meta.managers: 

1461 errors.extend(manager.check(**kwargs)) 

1462 return errors 

1463 

1464 @classmethod 

1465 def _check_fields(cls, **kwargs): 

1466 """Perform all field checks.""" 

1467 errors = [] 

1468 for field in cls._meta.local_fields: 

1469 errors.extend(field.check(**kwargs)) 

1470 for field in cls._meta.local_many_to_many: 

1471 errors.extend(field.check(from_model=cls, **kwargs)) 

1472 return errors 

1473 

1474 @classmethod 

1475 def _check_m2m_through_same_relationship(cls): 

1476 """Check if no relationship model is used by more than one m2m field.""" 

1477 

1478 errors = [] 

1479 seen_intermediary_signatures = [] 

1480 

1481 fields = cls._meta.local_many_to_many 

1482 

1483 # Skip when the target model wasn't found. 

1484 fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase)) 

1485 

1486 # Skip when the relationship model wasn't found. 

1487 fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase)) 

1488 

1489 for f in fields: 

1490 signature = ( 

1491 f.remote_field.model, 

1492 cls, 

1493 f.remote_field.through, 

1494 f.remote_field.through_fields, 

1495 ) 

1496 if signature in seen_intermediary_signatures: 

1497 errors.append( 

1498 preflight.Error( 

1499 "The model has two identical many-to-many relations " 

1500 f"through the intermediate model '{f.remote_field.through._meta.label}'.", 

1501 obj=cls, 

1502 id="models.E003", 

1503 ) 

1504 ) 

1505 else: 

1506 seen_intermediary_signatures.append(signature) 

1507 return errors 

1508 

1509 @classmethod 

1510 def _check_id_field(cls): 

1511 """Check if `id` field is a primary key.""" 

1512 fields = [ 

1513 f for f in cls._meta.local_fields if f.name == "id" and f != cls._meta.pk 

1514 ] 

1515 # fields is empty or consists of the invalid "id" field 

1516 if fields and not fields[0].primary_key and cls._meta.pk.name == "id": 

1517 return [ 

1518 preflight.Error( 

1519 "'id' can only be used as a field name if the field also " 

1520 "sets 'primary_key=True'.", 

1521 obj=cls, 

1522 id="models.E004", 

1523 ) 

1524 ] 

1525 else: 

1526 return [] 

1527 

1528 @classmethod 

1529 def _check_field_name_clashes(cls): 

1530 """Forbid field shadowing in multi-table inheritance.""" 

1531 errors = [] 

1532 used_fields = {} # name or attname -> field 

1533 

1534 # Check that multi-inheritance doesn't cause field name shadowing. 

1535 for parent in cls._meta.get_parent_list(): 

1536 for f in parent._meta.local_fields: 

1537 clash = used_fields.get(f.name) or used_fields.get(f.attname) or None 

1538 if clash: 

1539 errors.append( 

1540 preflight.Error( 

1541 f"The field '{clash.name}' from parent model " 

1542 f"'{clash.model._meta}' clashes with the field '{f.name}' " 

1543 f"from parent model '{f.model._meta}'.", 

1544 obj=cls, 

1545 id="models.E005", 

1546 ) 

1547 ) 

1548 used_fields[f.name] = f 

1549 used_fields[f.attname] = f 

1550 

1551 # Check that fields defined in the model don't clash with fields from 

1552 # parents, including auto-generated fields like multi-table inheritance 

1553 # child accessors. 

1554 for parent in cls._meta.get_parent_list(): 

1555 for f in parent._meta.get_fields(): 

1556 if f not in used_fields: 

1557 used_fields[f.name] = f 

1558 

1559 for f in cls._meta.local_fields: 

1560 clash = used_fields.get(f.name) or used_fields.get(f.attname) or None 

1561 # Note that we may detect clash between user-defined non-unique 

1562 # field "id" and automatically added unique field "id", both 

1563 # defined at the same model. This special case is considered in 

1564 # _check_id_field and here we ignore it. 

1565 id_conflict = ( 

1566 f.name == "id" and clash and clash.name == "id" and clash.model == cls 

1567 ) 

1568 if clash and not id_conflict: 

1569 errors.append( 

1570 preflight.Error( 

1571 f"The field '{f.name}' clashes with the field '{clash.name}' " 

1572 f"from model '{clash.model._meta}'.", 

1573 obj=f, 

1574 id="models.E006", 

1575 ) 

1576 ) 

1577 used_fields[f.name] = f 

1578 used_fields[f.attname] = f 

1579 

1580 return errors 

1581 

1582 @classmethod 

1583 def _check_column_name_clashes(cls): 

1584 # Store a list of column names which have already been used by other fields. 

1585 used_column_names = [] 

1586 errors = [] 

1587 

1588 for f in cls._meta.local_fields: 

1589 _, column_name = f.get_attname_column() 

1590 

1591 # Ensure the column name is not already in use. 

1592 if column_name and column_name in used_column_names: 

1593 errors.append( 

1594 preflight.Error( 

1595 f"Field '{f.name}' has column name '{column_name}' that is used by " 

1596 "another field.", 

1597 hint="Specify a 'db_column' for the field.", 

1598 obj=cls, 

1599 id="models.E007", 

1600 ) 

1601 ) 

1602 else: 

1603 used_column_names.append(column_name) 

1604 

1605 return errors 

1606 

1607 @classmethod 

1608 def _check_model_name_db_lookup_clashes(cls): 

1609 errors = [] 

1610 model_name = cls.__name__ 

1611 if model_name.startswith("_") or model_name.endswith("_"): 

1612 errors.append( 

1613 preflight.Error( 

1614 f"The model name '{model_name}' cannot start or end with an underscore " 

1615 "as it collides with the query lookup syntax.", 

1616 obj=cls, 

1617 id="models.E023", 

1618 ) 

1619 ) 

1620 elif LOOKUP_SEP in model_name: 

1621 errors.append( 

1622 preflight.Error( 

1623 f"The model name '{model_name}' cannot contain double underscores as " 

1624 "it collides with the query lookup syntax.", 

1625 obj=cls, 

1626 id="models.E024", 

1627 ) 

1628 ) 

1629 return errors 

1630 

1631 @classmethod 

1632 def _check_property_name_related_field_accessor_clashes(cls): 

1633 errors = [] 

1634 property_names = cls._meta._property_names 

1635 related_field_accessors = ( 

1636 f.get_attname() 

1637 for f in cls._meta._get_fields(reverse=False) 

1638 if f.is_relation and f.related_model is not None 

1639 ) 

1640 for accessor in related_field_accessors: 

1641 if accessor in property_names: 

1642 errors.append( 

1643 preflight.Error( 

1644 f"The property '{accessor}' clashes with a related field " 

1645 "accessor.", 

1646 obj=cls, 

1647 id="models.E025", 

1648 ) 

1649 ) 

1650 return errors 

1651 
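The accessor compared here is the forward relation's attname (for example 'author_id' for a ForeignKey named 'author'), so a @property with that name hides the attribute the ORM writes to. A hypothetical model that would be reported as models.E025, assuming ForeignKey behaves as in Django:

from plain import models
from plain.models.deletion import CASCADE


class Book(models.Model):
    author = models.ForeignKey("self", on_delete=CASCADE)

    @property
    def author_id(self):  # shadows the FK's attname -> models.E025
        return 0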

1652 @classmethod 

1653 def _check_single_primary_key(cls): 

1654 errors = [] 

1655 if sum(1 for f in cls._meta.local_fields if f.primary_key) > 1: 

1656 errors.append( 

1657 preflight.Error( 

1658 "The model cannot have more than one field with " 

1659 "'primary_key=True'.", 

1660 obj=cls, 

1661 id="models.E026", 

1662 ) 

1663 ) 

1664 return errors 

1665 
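A hypothetical model (not part of base.py) that would trigger models.E026, since two of its local fields declare primary_key=True:

from plain import models


class Token(models.Model):
    code = models.IntegerField(primary_key=True)
    # A second primary key on the same model triggers models.E026.
    serial = models.IntegerField(primary_key=True)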

1666 @classmethod 

1667 def _check_indexes(cls, databases): 

1668 """Check fields, names, and conditions of indexes.""" 

1669 errors = [] 

1670 references = set() 

1671 for index in cls._meta.indexes: 

1672 # Index name can't start with an underscore or a number, restricted 

1673 # for cross-database compatibility with Oracle. 

1674 if index.name[0] == "_" or index.name[0].isdigit(): 

1675 errors.append( 

1676 preflight.Error( 

1677 f"The index name '{index.name}' cannot start with an underscore " 

1678 "or a number.", 

1679 obj=cls, 

1680 id="models.E033", 

1681 ), 

1682 ) 

1683 if len(index.name) > index.max_name_length: 

1684 errors.append( 

1685 preflight.Error( 

1686 f"The index name '{index.name}' cannot be longer than " 

1687 f"{index.max_name_length} characters.", 

1688 obj=cls, 

1689 id="models.E034", 

1690 ), 

1691 ) 

1692 if index.contains_expressions: 

1693 for expression in index.expressions: 

1694 references.update( 

1695 ref[0] for ref in cls._get_expr_references(expression) 

1696 ) 

1697 for db in databases: 

1698 if not router.allow_migrate_model(db, cls): 

1699 continue 

1700 connection = connections[db] 

1701 if not ( 

1702 connection.features.supports_partial_indexes 

1703 or "supports_partial_indexes" in cls._meta.required_db_features 

1704 ) and any(index.condition is not None for index in cls._meta.indexes): 

1705 errors.append( 

1706 preflight.Warning( 

1707 f"{connection.display_name} does not support indexes with conditions.", 

1708 hint=( 

1709 "Conditions will be ignored. Silence this warning " 

1710 "if you don't care about it." 

1711 ), 

1712 obj=cls, 

1713 id="models.W037", 

1714 ) 

1715 ) 

1716 if not ( 

1717 connection.features.supports_covering_indexes 

1718 or "supports_covering_indexes" in cls._meta.required_db_features 

1719 ) and any(index.include for index in cls._meta.indexes): 

1720 errors.append( 

1721 preflight.Warning( 

1722 f"{connection.display_name} does not support indexes with non-key columns.", 

1723 hint=( 

1724 "Non-key columns will be ignored. Silence this " 

1725 "warning if you don't care about it." 

1726 ), 

1727 obj=cls, 

1728 id="models.W040", 

1729 ) 

1730 ) 

1731 if not ( 

1732 connection.features.supports_expression_indexes 

1733 or "supports_expression_indexes" in cls._meta.required_db_features 

1734 ) and any(index.contains_expressions for index in cls._meta.indexes): 

1735 errors.append( 

1736 preflight.Warning( 

1737 f"{connection.display_name} does not support indexes on expressions.", 

1738 hint=( 

1739 "An index won't be created. Silence this warning " 

1740 "if you don't care about it." 

1741 ), 

1742 obj=cls, 

1743 id="models.W043", 

1744 ) 

1745 ) 

1746 fields = [ 

1747 field for index in cls._meta.indexes for field, _ in index.fields_orders 

1748 ] 

1749 fields += [include for index in cls._meta.indexes for include in index.include] 

1750 fields += references 

1751 errors.extend(cls._check_local_fields(fields, "indexes")) 

1752 return errors 

1753 
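A sketch of Meta.indexes declarations the checks above would reject. The Index class and its keyword arguments are assumed to mirror Django's models.Index; neither is imported in this file, so treat the example as hypothetical:

from plain import models


class Article(models.Model):
    title = models.CharField(max_length=200)

    class Meta:
        indexes = [
            # Name starts with an underscore -> models.E033.
            models.Index(fields=["title"], name="_title_idx"),
            # Refers to a field that doesn't exist -> models.E012 via
            # _check_local_fields(fields, "indexes").
            models.Index(fields=["headline"], name="article_headline_idx"),
        ]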

1754 @classmethod 

1755 def _check_local_fields(cls, fields, option): 

1756 from plain import models 

1757 

1758 # To avoid hitting the relation tree prematurely, use our own 

1759 # forward_fields_map instead of get_field(). 

1760 forward_fields_map = {} 

1761 for field in cls._meta._get_fields(reverse=False): 

1762 forward_fields_map[field.name] = field 

1763 if hasattr(field, "attname"): 

1764 forward_fields_map[field.attname] = field 

1765 

1766 errors = [] 

1767 for field_name in fields: 

1768 try: 

1769 field = forward_fields_map[field_name] 

1770 except KeyError: 

1771 errors.append( 

1772 preflight.Error( 

1773 f"'{option}' refers to the nonexistent field '{field_name}'.", 

1774 obj=cls, 

1775 id="models.E012", 

1776 ) 

1777 ) 

1778 else: 

1779 if isinstance(field.remote_field, models.ManyToManyRel): 

1780 errors.append( 

1781 preflight.Error( 

1782 f"'{option}' refers to a ManyToManyField '{field_name}', but " 

1783 f"ManyToManyFields are not permitted in '{option}'.", 

1784 obj=cls, 

1785 id="models.E013", 

1786 ) 

1787 ) 

1788 elif field not in cls._meta.local_fields: 

1789 errors.append( 

1790 preflight.Error( 

1791 f"'{option}' refers to field '{field_name}' which is not local to model " 

1792 f"'{cls._meta.object_name}'.", 

1793 hint="This issue may be caused by multi-table inheritance.", 

1794 obj=cls, 

1795 id="models.E016", 

1796 ) 

1797 ) 

1798 return errors 

1799 
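The map above stores each forward field under both its name and its attname, so index and constraint declarations can refer to either spelling. For a hypothetical ForeignKey (declaration assumed to mirror Django's) that means:

from plain import models
from plain.models.deletion import CASCADE


class Order(models.Model):
    customer = models.ForeignKey("self", on_delete=CASCADE)

# forward_fields_map for Order would then contain two keys for one field:
#   forward_fields_map["customer"]     # field.name
#   forward_fields_map["customer_id"]  # field.attname, the stored column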

1800 @classmethod 

1801 def _check_ordering(cls): 

1802 """ 

1803 Check "ordering" option -- is it a list of strings and do all fields 

1804 exist? 

1805 """ 

1806 if cls._meta._ordering_clash: 

1807 return [ 

1808 preflight.Error( 

1809 "'ordering' and 'order_with_respect_to' cannot be used together.", 

1810 obj=cls, 

1811 id="models.E021", 

1812 ), 

1813 ] 

1814 

1815 if cls._meta.order_with_respect_to or not cls._meta.ordering: 

1816 return [] 

1817 

1818 if not isinstance(cls._meta.ordering, list | tuple): 

1819 return [ 

1820 preflight.Error( 

1821 "'ordering' must be a tuple or list (even if you want to order by " 

1822 "only one field).", 

1823 obj=cls, 

1824 id="models.E014", 

1825 ) 

1826 ] 

1827 

1828 errors = [] 

1829 fields = cls._meta.ordering 

1830 

1831 # Skip expressions and '?' fields. 

1832 fields = (f for f in fields if isinstance(f, str) and f != "?") 

1833 

1834 # Convert "-field" to "field". 

1835 fields = (f.removeprefix("-") for f in fields) 

1836 

1837 # Separate related fields and non-related fields. 

1838 _fields = [] 

1839 related_fields = [] 

1840 for f in fields: 

1841 if LOOKUP_SEP in f: 

1842 related_fields.append(f) 

1843 else: 

1844 _fields.append(f) 

1845 fields = _fields 

1846 

1847 # Check related fields. 

1848 for field in related_fields: 

1849 _cls = cls 

1850 fld = None 

1851 for part in field.split(LOOKUP_SEP): 

1852 try: 

1853 # pk is an alias that won't be found by opts.get_field. 

1854 if part == "pk": 

1855 fld = _cls._meta.pk 

1856 else: 

1857 fld = _cls._meta.get_field(part) 

1858 if fld.is_relation: 

1859 _cls = fld.path_infos[-1].to_opts.model 

1860 else: 

1861 _cls = None 

1862 except (FieldDoesNotExist, AttributeError): 

1863 if fld is None or ( 

1864 fld.get_transform(part) is None and fld.get_lookup(part) is None 

1865 ): 

1866 errors.append( 

1867 preflight.Error( 

1868 "'ordering' refers to the nonexistent field, " 

1869 f"related field, or lookup '{field}'.", 

1870 obj=cls, 

1871 id="models.E015", 

1872 ) 

1873 ) 

1874 

1875 # Skip ordering on pk. This is always a valid order_by field 

1876 # but is an alias and therefore won't be found by opts.get_field. 

1877 fields = {f for f in fields if f != "pk"} 

1878 

1879 # Check for invalid or nonexistent fields in ordering. 

1880 invalid_fields = [] 

1881 

1882 # Any field name that is not present in valid_fields does not exist. 

1883 # Also, ordering by m2m fields is not allowed. 

1884 opts = cls._meta 

1885 valid_fields = set( 

1886 chain.from_iterable( 

1887 (f.name, f.attname) 

1888 if not (f.auto_created and not f.concrete) 

1889 else (f.field.related_query_name(),) 

1890 for f in chain(opts.fields, opts.related_objects) 

1891 ) 

1892 ) 

1893 

1894 invalid_fields.extend(fields - valid_fields) 

1895 

1896 for invalid_field in invalid_fields: 

1897 errors.append( 

1898 preflight.Error( 

1899 "'ordering' refers to the nonexistent field, related " 

1900 f"field, or lookup '{invalid_field}'.", 

1901 obj=cls, 

1902 id="models.E015", 

1903 ) 

1904 ) 

1905 return errors 

1906 
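A sketch of Meta.ordering values and how the checks above treat them (hypothetical models; field classes and keyword arguments assumed to mirror Django's):

from plain import models
from plain.models.deletion import CASCADE


class Author(models.Model):
    name = models.CharField(max_length=50)


class Post(models.Model):
    author = models.ForeignKey(Author, on_delete=CASCADE)

    class Meta:
        # ordering = "author"              -> models.E014 (not a list/tuple)
        # ordering = ["author__nickname"]  -> models.E015 (nonexistent lookup)
        ordering = ["author__name", "-pk"]  # valid: related path and pk alias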

1907 @classmethod 

1908 def _check_long_column_names(cls, databases): 

1909 """ 

1910 Check that any auto-generated column names are shorter than the limits 

1911 for each database in which the model will be created. 

1912 """ 

1913 if not databases: 

1914 return [] 

1915 errors = [] 

1916 allowed_len = None 

1917 db_alias = None 

1918 

1919 # Find the minimum max allowed length among all specified db_aliases. 

1920 for db in databases: 

1921 # Skip databases where the model won't be created. 

1922 if not router.allow_migrate_model(db, cls): 

1923 continue 

1924 connection = connections[db] 

1925 max_name_length = connection.ops.max_name_length() 

1926 if max_name_length is None or connection.features.truncates_names: 

1927 continue 

1928 else: 

1929 if allowed_len is None: 

1930 allowed_len = max_name_length 

1931 db_alias = db 

1932 elif max_name_length < allowed_len: 

1933 allowed_len = max_name_length 

1934 db_alias = db 

1935 

1936 if allowed_len is None: 

1937 return errors 

1938 

1939 for f in cls._meta.local_fields: 

1940 _, column_name = f.get_attname_column() 

1941 

1942 # Check if auto-generated name for the field is too long 

1943 # for the database. 

1944 if ( 

1945 f.db_column is None 

1946 and column_name is not None 

1947 and len(column_name) > allowed_len 

1948 ): 

1949 errors.append( 

1950 preflight.Error( 

1951 f'Autogenerated column name too long for field "{column_name}". ' 

1952 f'Maximum length is "{allowed_len}" for database "{db_alias}".', 

1953 hint="Set the column name manually using 'db_column'.", 

1954 obj=cls, 

1955 id="models.E018", 

1956 ) 

1957 ) 

1958 

1959 for f in cls._meta.local_many_to_many: 

1960 # Skip 'through' models that are still unresolved string references. 

1961 if isinstance(f.remote_field.through, str): 

1962 continue 

1963 

1964 # Check if auto-generated name for the M2M field is too long 

1965 # for the database. 

1966 for m2m in f.remote_field.through._meta.local_fields: 

1967 _, rel_name = m2m.get_attname_column() 

1968 if ( 

1969 m2m.db_column is None 

1970 and rel_name is not None 

1971 and len(rel_name) > allowed_len 

1972 ): 

1973 errors.append( 

1974 preflight.Error( 

1975 "Autogenerated column name too long for M2M field " 

1976 f'"{rel_name}". Maximum length is "{allowed_len}" for database "{db_alias}".', 

1977 hint=( 

1978 "Use 'through' to create a separate model for " 

1979 "M2M and then set column_name using 'db_column'." 

1980 ), 

1981 obj=cls, 

1982 id="models.E019", 

1983 ) 

1984 ) 

1985 

1986 return errors 

1987 
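Whether models.E018/E019 fire depends on the identifier length limit of each configured backend (for example 30 characters on Oracle), so the model below is only an illustrative sketch; the hint's fix is to choose the column name explicitly:

from plain import models


class Measurement(models.Model):
    # If the backend's max identifier length is shorter than this field
    # name, the auto-generated column triggers models.E018 ...
    atmospheric_pressure_reading_at_station_entrance_in_hectopascals = (
        models.IntegerField()
    )
    # ... while an explicit db_column keeps the column within the limit.
    relative_humidity_reading_at_station_entrance_in_percent = (
        models.IntegerField(db_column="humidity_entrance")
    )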

1988 @classmethod 

1989 def _get_expr_references(cls, expr): 

1990 if isinstance(expr, Q): 

1991 for child in expr.children: 

1992 if isinstance(child, tuple): 

1993 lookup, value = child 

1994 yield tuple(lookup.split(LOOKUP_SEP)) 

1995 yield from cls._get_expr_references(value) 

1996 else: 

1997 yield from cls._get_expr_references(child) 

1998 elif isinstance(expr, F): 

1999 yield tuple(expr.name.split(LOOKUP_SEP)) 

2000 elif hasattr(expr, "get_source_expressions"): 

2001 for src_expr in expr.get_source_expressions(): 

2002 yield from cls._get_expr_references(src_expr) 

2003 
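The generator above yields one tuple per referenced name, split on LOOKUP_SEP, recursing into Q objects, F expressions, and anything exposing get_source_expressions(). For example:

from plain.models.query import F, Q

condition = Q(price__gt=F("discount"))

# For any model class M, list(M._get_expr_references(condition)) yields:
#   ("price", "gt")   from the lookup key "price__gt"
#   ("discount",)     from the F() expression on the right-hand side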

2004 @classmethod 

2005 def _check_constraints(cls, databases): 

2006 errors = [] 

2007 for db in databases: 

2008 if not router.allow_migrate_model(db, cls): 

2009 continue 

2010 connection = connections[db] 

2011 if not ( 

2012 connection.features.supports_table_check_constraints 

2013 or "supports_table_check_constraints" in cls._meta.required_db_features 

2014 ) and any( 

2015 isinstance(constraint, CheckConstraint) 

2016 for constraint in cls._meta.constraints 

2017 ): 

2018 errors.append( 

2019 preflight.Warning( 

2020 f"{connection.display_name} does not support check constraints.", 

2021 hint=( 

2022 "A constraint won't be created. Silence this " 

2023 "warning if you don't care about it." 

2024 ), 

2025 obj=cls, 

2026 id="models.W027", 

2027 ) 

2028 ) 

2029 if not ( 

2030 connection.features.supports_partial_indexes 

2031 or "supports_partial_indexes" in cls._meta.required_db_features 

2032 ) and any( 

2033 isinstance(constraint, UniqueConstraint) 

2034 and constraint.condition is not None 

2035 for constraint in cls._meta.constraints 

2036 ): 

2037 errors.append( 

2038 preflight.Warning( 

2039 f"{connection.display_name} does not support unique constraints with " 

2040 "conditions.", 

2041 hint=( 

2042 "A constraint won't be created. Silence this " 

2043 "warning if you don't care about it." 

2044 ), 

2045 obj=cls, 

2046 id="models.W036", 

2047 ) 

2048 ) 

2049 if not ( 

2050 connection.features.supports_deferrable_unique_constraints 

2051 or "supports_deferrable_unique_constraints" 

2052 in cls._meta.required_db_features 

2053 ) and any( 

2054 isinstance(constraint, UniqueConstraint) 

2055 and constraint.deferrable is not None 

2056 for constraint in cls._meta.constraints 

2057 ): 

2058 errors.append( 

2059 preflight.Warning( 

2060 f"{connection.display_name} does not support deferrable unique constraints.", 

2061 hint=( 

2062 "A constraint won't be created. Silence this " 

2063 "warning if you don't care about it." 

2064 ), 

2065 obj=cls, 

2066 id="models.W038", 

2067 ) 

2068 ) 

2069 if not ( 

2070 connection.features.supports_covering_indexes 

2071 or "supports_covering_indexes" in cls._meta.required_db_features 

2072 ) and any( 

2073 isinstance(constraint, UniqueConstraint) and constraint.include 

2074 for constraint in cls._meta.constraints 

2075 ): 

2076 errors.append( 

2077 preflight.Warning( 

2078 f"{connection.display_name} does not support unique constraints with non-key " 

2079 "columns.", 

2080 hint=( 

2081 "A constraint won't be created. Silence this " 

2082 "warning if you don't care about it." 

2083 ), 

2084 obj=cls, 

2085 id="models.W039", 

2086 ) 

2087 ) 

2088 if not ( 

2089 connection.features.supports_expression_indexes 

2090 or "supports_expression_indexes" in cls._meta.required_db_features 

2091 ) and any( 

2092 isinstance(constraint, UniqueConstraint) 

2093 and constraint.contains_expressions 

2094 for constraint in cls._meta.constraints 

2095 ): 

2096 errors.append( 

2097 preflight.Warning( 

2098 f"{connection.display_name} does not support unique constraints on " 

2099 "expressions.", 

2100 hint=( 

2101 "A constraint won't be created. Silence this " 

2102 "warning if you don't care about it." 

2103 ), 

2104 obj=cls, 

2105 id="models.W044", 

2106 ) 

2107 ) 

2108 fields = set( 

2109 chain.from_iterable( 

2110 (*constraint.fields, *constraint.include) 

2111 for constraint in cls._meta.constraints 

2112 if isinstance(constraint, UniqueConstraint) 

2113 ) 

2114 ) 

2115 references = set() 

2116 for constraint in cls._meta.constraints: 

2117 if isinstance(constraint, UniqueConstraint): 

2118 if ( 

2119 connection.features.supports_partial_indexes 

2120 or "supports_partial_indexes" 

2121 not in cls._meta.required_db_features 

2122 ) and isinstance(constraint.condition, Q): 

2123 references.update( 

2124 cls._get_expr_references(constraint.condition) 

2125 ) 

2126 if ( 

2127 connection.features.supports_expression_indexes 

2128 or "supports_expression_indexes" 

2129 not in cls._meta.required_db_features 

2130 ) and constraint.contains_expressions: 

2131 for expression in constraint.expressions: 

2132 references.update(cls._get_expr_references(expression)) 

2133 elif isinstance(constraint, CheckConstraint): 

2134 if ( 

2135 connection.features.supports_table_check_constraints 

2136 or "supports_table_check_constraints" 

2137 not in cls._meta.required_db_features 

2138 ): 

2139 if isinstance(constraint.check, Q): 

2140 references.update( 

2141 cls._get_expr_references(constraint.check) 

2142 ) 

2143 if any( 

2144 isinstance(expr, RawSQL) 

2145 for expr in constraint.check.flatten() 

2146 ): 

2147 errors.append( 

2148 preflight.Warning( 

2149 f"Check constraint {constraint.name!r} contains " 

2150 f"RawSQL() expression and won't be validated " 

2151 f"during the model full_clean().", 

2152 hint=( 

2153 "Silence this warning if you don't care about " 

2154 "it." 

2155 ), 

2156 obj=cls, 

2157 id="models.W045", 

2158 ), 

2159 ) 

2160 for field_name, *lookups in references: 

2161 # pk is an alias that won't be found by opts.get_field. 

2162 if field_name != "pk": 

2163 fields.add(field_name) 

2164 if not lookups: 

2165 # If it has no lookups it cannot result in a JOIN. 

2166 continue 

2167 try: 

2168 if field_name == "pk": 

2169 field = cls._meta.pk 

2170 else: 

2171 field = cls._meta.get_field(field_name) 

2172 if not field.is_relation or field.many_to_many or field.one_to_many: 

2173 continue 

2174 except FieldDoesNotExist: 

2175 continue 

2176 # JOIN must happen at the first lookup. 

2177 first_lookup = lookups[0] 

2178 if ( 

2179 hasattr(field, "get_transform") 

2180 and hasattr(field, "get_lookup") 

2181 and field.get_transform(first_lookup) is None 

2182 and field.get_lookup(first_lookup) is None 

2183 ): 

2184 errors.append( 

2185 preflight.Error( 

2186 f"'constraints' refers to the joined field '{LOOKUP_SEP.join([field_name] + lookups)}'.", 

2187 obj=cls, 

2188 id="models.E041", 

2189 ) 

2190 ) 

2191 errors.extend(cls._check_local_fields(fields, "constraints")) 

2192 return errors 

2193 
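The E041 branch above rejects constraint conditions that would require a JOIN, i.e. whose first lookup segment after a relational field is neither a transform nor a lookup on that field. A hypothetical constraint that would be reported (field classes and constraint keyword arguments assumed to mirror Django's):

from plain import models
from plain.models.constraints import UniqueConstraint
from plain.models.deletion import CASCADE
from plain.models.query import Q


class Account(models.Model):
    active = models.BooleanField()


class Membership(models.Model):
    account = models.ForeignKey(Account, on_delete=CASCADE)

    class Meta:
        constraints = [
            # "account__active" crosses the relation, so the check reports:
            # "'constraints' refers to the joined field 'account__active'."
            UniqueConstraint(
                fields=["account"],
                condition=Q(account__active=True),
                name="unique_membership_for_active_account",
            ),
        ]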

2194 

2195############################################ 

2196# HELPER FUNCTIONS (CURRIED MODEL METHODS) # 

2197############################################ 

2198 

2199# ORDERING METHODS ######################### 

2200 

2201 

2202def method_set_order(self, ordered_obj, id_list, using=None): 

2203 order_wrt = ordered_obj._meta.order_with_respect_to 

2204 filter_args = order_wrt.get_forward_related_filter(self) 

2205 ordered_obj.objects.db_manager(using).filter(**filter_args).bulk_update( 

2206 [ordered_obj(pk=pk, _order=order) for order, pk in enumerate(id_list)], 

2207 ["_order"], 

2208 ) 

2209 

2210 

2211def method_get_order(self, ordered_obj): 

2212 order_wrt = ordered_obj._meta.order_with_respect_to 

2213 filter_args = order_wrt.get_forward_related_filter(self) 

2214 pk_name = ordered_obj._meta.pk.name 

2215 return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True) 

2216 

2217 

2218def make_foreign_order_accessors(model, related_model): 

2219 setattr( 

2220 related_model, 

2221 f"get_{model.__name__.lower()}_order", 

2222 partialmethod(method_get_order, model), 

2223 ) 

2224 setattr( 

2225 related_model, 

2226 f"set_{model.__name__.lower()}_order", 

2227 partialmethod(method_set_order, model), 

2228 ) 

2229 
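A sketch of what the two setattr() calls above produce when a model uses order_with_respect_to (hypothetical models; the Meta option is assumed to behave as in Django):

from plain import models
from plain.models.deletion import CASCADE


class Question(models.Model):
    text = models.CharField(max_length=200)


class Answer(models.Model):
    question = models.ForeignKey(Question, on_delete=CASCADE)

    class Meta:
        order_with_respect_to = "question"


# make_foreign_order_accessors(Answer, Question) adds, on Question instances:
#   question.get_answer_order()           # pks of related Answers, in order
#   question.set_answer_order([3, 1, 2])  # bulk-updates the hidden _order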

2230 

2231######## 

2232# MISC # 

2233######## 

2234 

2235 

2236def model_unpickle(model_id): 

2237 """Used to unpickle Model subclasses with deferred fields.""" 

2238 if isinstance(model_id, tuple): 

2239 model = packages.get_model(*model_id) 

2240 else: 

2241 # Backwards compat - the model was cached directly in earlier versions. 

2242 model = model_id 

2243 return model.__new__(model) 

2244 

2245 

2246model_unpickle.__safe_for_unpickle__ = True
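A short sketch of the hook above in use during unpickling; the ("blog", "Post") model identifier is hypothetical:

# packages.get_model("blog", "Post") resolves the class, and __new__()
# creates an empty instance whose state (including deferred fields) is
# restored from the pickle payload afterwards.
instance = model_unpickle(("blog", "Post"))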