Coverage for /Users/davegaeddert/Developer/dropseed/plain/plain-models/plain/models/backends/base/schema.py: 35%

677 statements  

coverage.py v7.6.9, created at 2024-12-23 11:16 -0600

1import logging 

2import operator 

3from datetime import datetime 

4 

5from plain.models.backends.ddl_references import ( 

6 Columns, 

7 Expressions, 

8 ForeignKeyName, 

9 IndexName, 

10 Statement, 

11 Table, 

12) 

13from plain.models.backends.utils import names_digest, split_identifier, truncate_name 

14from plain.models.constraints import Deferrable 

15from plain.models.indexes import Index 

16from plain.models.sql import Query 

17from plain.models.transaction import TransactionManagementError, atomic 

18from plain.runtime import settings 

19from plain.utils import timezone 

20 

21logger = logging.getLogger("plain.models.backends.schema") 

22 

23 

24def _is_relevant_relation(relation, altered_field): 

25 """ 

26 When altering the given field, must constraints on its model from the given 

27 relation be temporarily dropped? 

28 """ 

29 field = relation.field 

30 if field.many_to_many: 

31 # M2M reverse field 

32 return False 

33 if altered_field.primary_key and field.to_fields == [None]: 

34 # Foreign key constraint on the primary key, which is being altered. 

35 return True 

36 # Is the constraint targeting the field being altered? 

37 return altered_field.name in field.to_fields 

38 

39 

40def _all_related_fields(model): 

41 # Related fields must be returned in a deterministic order. 

42 return sorted( 

43 model._meta._get_fields( 

44 forward=False, 

45 reverse=True, 

46 include_hidden=True, 

47 include_parents=False, 

48 ), 

49 key=operator.attrgetter("name"), 

50 ) 

51 

52 

53def _related_non_m2m_objects(old_field, new_field): 

54 # Filter out m2m objects from reverse relations. 

55 # Return (old_relation, new_relation) tuples. 

56 related_fields = zip( 

57 ( 

58 obj 

59 for obj in _all_related_fields(old_field.model) 

60 if _is_relevant_relation(obj, old_field) 

61 ), 

62 ( 

63 obj 

64 for obj in _all_related_fields(new_field.model) 

65 if _is_relevant_relation(obj, new_field) 

66 ), 

67 ) 

68 for old_rel, new_rel in related_fields: 

69 yield old_rel, new_rel 

70 yield from _related_non_m2m_objects( 

71 old_rel.remote_field, 

72 new_rel.remote_field, 

73 ) 

74 

75 

76class BaseDatabaseSchemaEditor: 

77 """ 

78 This class and its subclasses are responsible for emitting schema-changing 

79 statements to the databases - model creation/removal/alteration, field 

80 renaming, index fiddling, and so on. 

81 """ 

82 

83 # Overrideable SQL templates 

84 sql_create_table = "CREATE TABLE %(table)s (%(definition)s)" 

85 sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s" 

86 sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s" 

87 sql_delete_table = "DROP TABLE %(table)s CASCADE" 

88 

89 sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s" 

90 sql_alter_column = "ALTER TABLE %(table)s %(changes)s" 

91 sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s" 

92 sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL" 

93 sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL" 

94 sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s" 

95 sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT" 

96 sql_alter_column_no_default_null = sql_alter_column_no_default 

97 sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE" 

98 sql_rename_column = ( 

99 "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s" 

100 ) 

101 sql_update_with_default = ( 

102 "UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL" 

103 ) 

104 

105 sql_unique_constraint = "UNIQUE (%(columns)s)%(deferrable)s" 

106 sql_check_constraint = "CHECK (%(check)s)" 

107 sql_delete_constraint = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" 

108 sql_constraint = "CONSTRAINT %(name)s %(constraint)s" 

109 

110 sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)" 

111 sql_delete_check = sql_delete_constraint 

112 

113 sql_create_unique = ( 

114 "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s " 

115 "UNIQUE (%(columns)s)%(deferrable)s" 

116 ) 

117 sql_delete_unique = sql_delete_constraint 

118 

119 sql_create_fk = ( 

120 "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) " 

121 "REFERENCES %(to_table)s (%(to_column)s)%(deferrable)s" 

122 ) 

123 sql_create_inline_fk = None 

124 sql_create_column_inline_fk = None 

125 sql_delete_fk = sql_delete_constraint 

126 

127 sql_create_index = ( 

128 "CREATE INDEX %(name)s ON %(table)s " 

129 "(%(columns)s)%(include)s%(extra)s%(condition)s" 

130 ) 

131 sql_create_unique_index = ( 

132 "CREATE UNIQUE INDEX %(name)s ON %(table)s " 

133 "(%(columns)s)%(include)s%(condition)s" 

134 ) 

135 sql_rename_index = "ALTER INDEX %(old_name)s RENAME TO %(new_name)s" 

136 sql_delete_index = "DROP INDEX %(name)s" 

137 

138 sql_create_pk = ( 

139 "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)" 

140 ) 

141 sql_delete_pk = sql_delete_constraint 

142 

143 sql_delete_procedure = "DROP PROCEDURE %(procedure)s" 

144 

145 sql_alter_table_comment = "COMMENT ON TABLE %(table)s IS %(comment)s" 

146 sql_alter_column_comment = "COMMENT ON COLUMN %(table)s.%(column)s IS %(comment)s" 

147 

148 def __init__(self, connection, collect_sql=False, atomic=True): 

149 self.connection = connection 

150 self.collect_sql = collect_sql 

151 if self.collect_sql: 

152 self.collected_sql = [] 

153 self.atomic_migration = self.connection.features.can_rollback_ddl and atomic 

154 

155 # State-managing methods 

156 

157 def __enter__(self): 

158 self.deferred_sql = [] 

159 if self.atomic_migration: 

160 self.atomic = atomic(self.connection.alias) 

161 self.atomic.__enter__() 

162 return self 

163 

164 def __exit__(self, exc_type, exc_value, traceback): 

165 if exc_type is None: 

166 for sql in self.deferred_sql: 

167 self.execute(sql) 

168 if self.atomic_migration: 

169 self.atomic.__exit__(exc_type, exc_value, traceback) 
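# Illustrative usage of the context-manager protocol above (a sketch, not
# code from this module; `MySchemaEditor` stands in for a backend-specific
# subclass and `MyModel` for any model class):
#
#   with MySchemaEditor(connection) as editor:
#       editor.create_model(MyModel)
#   # On a clean exit, deferred index/FK statements in editor.deferred_sql
#   # are executed, then the atomic block (if DDL is rollback-able) closes.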

170 

171 # Core utility functions 

172 

173 def execute(self, sql, params=()): 

174 """Execute the given SQL statement, with optional parameters.""" 

175 # Don't perform the transactional DDL check if SQL is being collected 

176 # as it's not going to be executed anyway. 

177 if ( 

178 not self.collect_sql 

179 and self.connection.in_atomic_block 

180 and not self.connection.features.can_rollback_ddl 

181 ): 

182 raise TransactionManagementError( 

183 "Executing DDL statements while in a transaction on databases " 

184 "that can't perform a rollback is prohibited." 

185 ) 

186 # Account for non-string statement objects. 

187 sql = str(sql) 

188 # Log the command we're running, then run it 

189 logger.debug( 

190 "%s; (params %r)", sql, params, extra={"params": params, "sql": sql} 

191 ) 

192 if self.collect_sql: 

193 ending = "" if sql.rstrip().endswith(";") else ";" 

194 if params is not None: 

195 self.collected_sql.append( 

196 (sql % tuple(map(self.quote_value, params))) + ending 

197 ) 

198 else: 

199 self.collected_sql.append(sql + ending) 

200 else: 

201 with self.connection.cursor() as cursor: 

202 cursor.execute(sql, params) 
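# Example of the collect_sql path above (illustrative; `MySchemaEditor` is
# a hypothetical backend subclass): with collect_sql=True nothing is sent
# to the database, statements are rendered into collected_sql and a
# trailing ";" is appended when missing.
#
#   editor = MySchemaEditor(connection, collect_sql=True)
#   editor.execute("DROP TABLE example_table", params=None)
#   assert editor.collected_sql == ["DROP TABLE example_table;"]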

203 

204 def quote_name(self, name): 

205 return self.connection.ops.quote_name(name) 

206 

207 def table_sql(self, model): 

208 """Take a model and return its table definition.""" 

209 # Create column SQL, add FK deferreds if needed. 

210 column_sqls = [] 

211 params = [] 

212 for field in model._meta.local_fields: 

213 # SQL. 

214 definition, extra_params = self.column_sql(model, field) 

215 if definition is None: 

216 continue 

217 # Check constraints can go on the column SQL here. 

218 db_params = field.db_parameters(connection=self.connection) 

219 if db_params["check"]: 

220 definition += " " + self.sql_check_constraint % db_params 

221 # Autoincrement SQL (for backends with inline variant). 

222 col_type_suffix = field.db_type_suffix(connection=self.connection) 

223 if col_type_suffix: 

224 definition += f" {col_type_suffix}" 

225 params.extend(extra_params) 

226 # FK. 

227 if field.remote_field and field.db_constraint: 

228 to_table = field.remote_field.model._meta.db_table 

229 to_column = field.remote_field.model._meta.get_field( 

230 field.remote_field.field_name 

231 ).column 

232 if self.sql_create_inline_fk: 

233 definition += " " + self.sql_create_inline_fk % { 

234 "to_table": self.quote_name(to_table), 

235 "to_column": self.quote_name(to_column), 

236 } 

237 elif self.connection.features.supports_foreign_keys: 

238 self.deferred_sql.append( 

239 self._create_fk_sql( 

240 model, field, "_fk_%(to_table)s_%(to_column)s" 

241 ) 

242 ) 

243 # Add the SQL to our big list. 

244 column_sqls.append(f"{self.quote_name(field.column)} {definition}") 

245 # Autoincrement SQL (for backends with post table definition 

246 # variant). 

247 if field.get_internal_type() in ( 

248 "AutoField", 

249 "BigAutoField", 

250 "SmallAutoField", 

251 ): 

252 autoinc_sql = self.connection.ops.autoinc_sql( 

253 model._meta.db_table, field.column 

254 ) 

255 if autoinc_sql: 

256 self.deferred_sql.extend(autoinc_sql) 

257 constraints = [ 

258 constraint.constraint_sql(model, self) 

259 for constraint in model._meta.constraints 

260 ] 

261 sql = self.sql_create_table % { 

262 "table": self.quote_name(model._meta.db_table), 

263 "definition": ", ".join( 

264 str(constraint) 

265 for constraint in (*column_sqls, *constraints) 

266 if constraint 

267 ), 

268 } 

269 if model._meta.db_tablespace: 

270 tablespace_sql = self.connection.ops.tablespace_sql( 

271 model._meta.db_tablespace 

272 ) 

273 if tablespace_sql: 

274 sql += " " + tablespace_sql 

275 return sql, params 
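# Sketch of how table_sql() is consumed (this mirrors create_model()
# below; `MyModel` is a placeholder):
#
#   sql, params = editor.table_sql(MyModel)
#   editor.execute(sql, params or None)   # [] becomes None, see create_model()
#
# Foreign keys that cannot be written inline and autoincrement setup are
# appended to editor.deferred_sql rather than the returned statement.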

276 

277 # Field <-> database mapping functions 

278 

279 def _iter_column_sql( 

280 self, column_db_type, params, model, field, field_db_params, include_default 

281 ): 

282 yield column_db_type 

283 if collation := field_db_params.get("collation"): 

284 yield self._collate_sql(collation) 

285 if self.connection.features.supports_comments_inline and field.db_comment: 

286 yield self._comment_sql(field.db_comment) 

287 # Work out nullability. 

288 null = field.null 

289 # Include a default value, if requested. 

290 include_default = ( 

291 include_default 

292 and not self.skip_default(field) 

293 and 

294 # Don't include a default value if it's a nullable field and the 

295 # default cannot be dropped in the ALTER COLUMN statement (e.g. 

296 # MySQL longtext and longblob). 

297 not (null and self.skip_default_on_alter(field)) 

298 ) 

299 if include_default: 

300 default_value = self.effective_default(field) 

301 if default_value is not None: 

302 column_default = "DEFAULT " + self._column_default_sql(field) 

303 if self.connection.features.requires_literal_defaults: 

304 # Some databases can't take defaults as a parameter (Oracle). 

305 # If this is the case, the individual schema backend should 

306 # implement prepare_default(). 

307 yield column_default % self.prepare_default(default_value) 

308 else: 

309 yield column_default 

310 params.append(default_value) 

311 # Oracle treats the empty string ('') as null, so coerce the null 

312 # option whenever '' is a possible value. 

313 if ( 

314 field.empty_strings_allowed 

315 and not field.primary_key 

316 and self.connection.features.interprets_empty_strings_as_nulls 

317 ): 

318 null = True 

319 if not null: 

320 yield "NOT NULL" 

321 elif not self.connection.features.implied_column_null: 

322 yield "NULL" 

323 if field.primary_key: 

324 yield "PRIMARY KEY" 

325 elif field.unique: 

326 yield "UNIQUE" 

327 # Optionally add the tablespace if it's an implicitly indexed column. 

328 tablespace = field.db_tablespace or model._meta.db_tablespace 

329 if ( 

330 tablespace 

331 and self.connection.features.supports_tablespaces 

332 and field.unique 

333 ): 

334 yield self.connection.ops.tablespace_sql(tablespace, inline=True) 

335 

336 def column_sql(self, model, field, include_default=False): 

337 """ 

338 Return the column definition for a field. The field must already have 

339 had set_attributes_from_name() called. 

340 """ 

341 # Get the column's type and use that as the basis of the SQL. 

342 field_db_params = field.db_parameters(connection=self.connection) 

343 column_db_type = field_db_params["type"] 

344 # Check for fields that aren't actually columns (e.g. M2M). 

345 if column_db_type is None: 

346 return None, None 

347 params = [] 

348 return ( 

349 " ".join( 

350 # This appends to the params being returned. 

351 self._iter_column_sql( 

352 column_db_type, 

353 params, 

354 model, 

355 field, 

356 field_db_params, 

357 include_default, 

358 ) 

359 ), 

360 params, 

361 ) 

362 

363 def skip_default(self, field): 

364 """ 

365 Some backends don't accept default values for certain column types

366 (e.g. MySQL longtext and longblob).

367 """ 

368 return False 

369 

370 def skip_default_on_alter(self, field): 

371 """ 

372 Some backends don't accept default values for certain column types

373 (e.g. MySQL longtext and longblob) in the ALTER COLUMN statement.

374 """ 

375 return False 

376 

377 def prepare_default(self, value): 

378 """ 

379 Only used for backends that have the requires_literal_defaults feature.

380 """ 

381 raise NotImplementedError( 

382 "subclasses of BaseDatabaseSchemaEditor for backends which have " 

383 "requires_literal_defaults must provide a prepare_default() method" 

384 ) 

385 

386 def _column_default_sql(self, field): 

387 """ 

388 Return the SQL to use in a DEFAULT clause. The resulting string should 

389 contain a '%s' placeholder for a default value. 

390 """ 

391 return "%s" 

392 

393 @staticmethod 

394 def _effective_default(field): 

395 # This method allows testing its logic without a connection. 

396 if field.has_default(): 

397 default = field.get_default() 

398 elif not field.null and field.blank and field.empty_strings_allowed: 

399 if field.get_internal_type() == "BinaryField": 

400 default = b"" 

401 else: 

402 default = "" 

403 elif getattr(field, "auto_now", False) or getattr(field, "auto_now_add", False): 

404 internal_type = field.get_internal_type() 

405 if internal_type == "DateTimeField": 

406 default = timezone.now() 

407 else: 

408 default = datetime.now() 

409 if internal_type == "DateField": 

410 default = default.date() 

411 elif internal_type == "TimeField": 

412 default = default.time() 

413 else: 

414 default = None 

415 return default 
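# Illustrative outcomes of the fallback chain above (assuming fields with
# the usual has_default()/null/blank/empty_strings_allowed attributes):
#   - field with an explicit default            -> that default
#   - blank=True, not null, text-like field     -> "" (b"" for BinaryField)
#   - auto_now / auto_now_add DateTimeField     -> timezone.now()
#   - anything else                             -> None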

416 

417 def effective_default(self, field): 

418 """Return a field's effective database default value.""" 

419 return field.get_db_prep_save(self._effective_default(field), self.connection) 

420 

421 def quote_value(self, value): 

422 """ 

423 Return a quoted version of the value so it's safe to use in an SQL 

424 string. This is not safe against injection from user code; it is 

425 intended only for use in making SQL scripts or preparing default values 

426 for particularly tricky backends (defaults are not user-defined, though, 

427 so this is safe). 

428 """ 

429 raise NotImplementedError() 

430 

431 # Actions 

432 

433 def create_model(self, model): 

434 """ 

435 Create a table and any accompanying indexes or unique constraints for 

436 the given `model`. 

437 """ 

438 sql, params = self.table_sql(model) 

439 # Prevent using [] as params, in case a literal '%' is used in the 

440 # definition. 

441 self.execute(sql, params or None) 

442 

443 if self.connection.features.supports_comments: 

444 # Add table comment. 

445 if model._meta.db_table_comment: 

446 self.alter_db_table_comment(model, None, model._meta.db_table_comment) 

447 # Add column comments. 

448 if not self.connection.features.supports_comments_inline: 

449 for field in model._meta.local_fields: 

450 if field.db_comment: 

451 field_db_params = field.db_parameters( 

452 connection=self.connection 

453 ) 

454 field_type = field_db_params["type"] 

455 self.execute( 

456 *self._alter_column_comment_sql( 

457 model, field, field_type, field.db_comment 

458 ) 

459 ) 

460 # Add any field index (deferred as SQLite _remake_table needs it). 

461 self.deferred_sql.extend(self._model_indexes_sql(model)) 

462 

463 # Make M2M tables 

464 for field in model._meta.local_many_to_many: 

465 if field.remote_field.through._meta.auto_created: 

466 self.create_model(field.remote_field.through) 
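# Ordering notes for create_model() above (a summary of the code, not
# additional behavior): CREATE TABLE runs immediately; table and column
# comments follow where supported; field indexes are deferred (SQLite's
# _remake_table relies on that); auto-created M2M through models are
# handled by the recursive create_model() calls.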

467 

468 def delete_model(self, model): 

469 """Delete a model from the database.""" 

470 # Handle auto-created intermediary models 

471 for field in model._meta.local_many_to_many: 

472 if field.remote_field.through._meta.auto_created: 

473 self.delete_model(field.remote_field.through) 

474 

475 # Delete the table 

476 self.execute( 

477 self.sql_delete_table 

478 % { 

479 "table": self.quote_name(model._meta.db_table), 

480 } 

481 ) 

482 # Remove all deferred statements referencing the deleted table. 

483 for sql in list(self.deferred_sql): 

484 if isinstance(sql, Statement) and sql.references_table( 

485 model._meta.db_table 

486 ): 

487 self.deferred_sql.remove(sql) 

488 

489 def add_index(self, model, index): 

490 """Add an index on a model.""" 

491 if ( 

492 index.contains_expressions 

493 and not self.connection.features.supports_expression_indexes 

494 ): 

495 return None 

496 # Index.create_sql returns interpolated SQL which makes params=None a 

497 # necessity to avoid escaping attempts on execution. 

498 self.execute(index.create_sql(model, self), params=None) 

499 

500 def remove_index(self, model, index): 

501 """Remove an index from a model.""" 

502 if ( 

503 index.contains_expressions 

504 and not self.connection.features.supports_expression_indexes 

505 ): 

506 return None 

507 self.execute(index.remove_sql(model, self)) 

508 

509 def rename_index(self, model, old_index, new_index): 

510 if self.connection.features.can_rename_index: 

511 self.execute( 

512 self._rename_index_sql(model, old_index.name, new_index.name), 

513 params=None, 

514 ) 

515 else: 

516 self.remove_index(model, old_index) 

517 self.add_index(model, new_index) 

518 

519 def add_constraint(self, model, constraint): 

520 """Add a constraint to a model.""" 

521 sql = constraint.create_sql(model, self) 

522 if sql: 

523 # Constraint.create_sql returns interpolated SQL which makes 

524 # params=None a necessity to avoid escaping attempts on execution. 

525 self.execute(sql, params=None) 

526 

527 def remove_constraint(self, model, constraint): 

528 """Remove a constraint from a model.""" 

529 sql = constraint.remove_sql(model, self) 

530 if sql: 

531 self.execute(sql) 

532 

533 def alter_db_table(self, model, old_db_table, new_db_table): 

534 """Rename the table a model points to.""" 

535 if old_db_table == new_db_table or ( 

536 self.connection.features.ignores_table_name_case 

537 and old_db_table.lower() == new_db_table.lower() 

538 ): 

539 return 

540 self.execute( 

541 self.sql_rename_table 

542 % { 

543 "old_table": self.quote_name(old_db_table), 

544 "new_table": self.quote_name(new_db_table), 

545 } 

546 ) 

547 # Rename all references to the old table name. 

548 for sql in self.deferred_sql: 

549 if isinstance(sql, Statement): 

550 sql.rename_table_references(old_db_table, new_db_table) 

551 

552 def alter_db_table_comment(self, model, old_db_table_comment, new_db_table_comment): 

553 self.execute( 

554 self.sql_alter_table_comment 

555 % { 

556 "table": self.quote_name(model._meta.db_table), 

557 "comment": self.quote_value(new_db_table_comment or ""), 

558 } 

559 ) 

560 

561 def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace): 

562 """Move a model's table between tablespaces.""" 

563 self.execute( 

564 self.sql_retablespace_table 

565 % { 

566 "table": self.quote_name(model._meta.db_table), 

567 "old_tablespace": self.quote_name(old_db_tablespace), 

568 "new_tablespace": self.quote_name(new_db_tablespace), 

569 } 

570 ) 

571 

572 def add_field(self, model, field): 

573 """ 

574 Create a field on a model. Usually involves adding a column, but may 

575 involve adding a table instead (for M2M fields). 

576 """ 

577 # Special-case implicit M2M tables 

578 if field.many_to_many and field.remote_field.through._meta.auto_created: 

579 return self.create_model(field.remote_field.through) 

580 # Get the column's definition 

581 definition, params = self.column_sql(model, field, include_default=True) 

582 # It might not actually have a column behind it 

583 if definition is None: 

584 return 

585 if col_type_suffix := field.db_type_suffix(connection=self.connection): 

586 definition += f" {col_type_suffix}" 

587 # Check constraints can go on the column SQL here 

588 db_params = field.db_parameters(connection=self.connection) 

589 if db_params["check"]: 

590 definition += " " + self.sql_check_constraint % db_params 

591 if ( 

592 field.remote_field 

593 and self.connection.features.supports_foreign_keys 

594 and field.db_constraint 

595 ): 

596 constraint_suffix = "_fk_%(to_table)s_%(to_column)s" 

597 # Add FK constraint inline, if supported. 

598 if self.sql_create_column_inline_fk: 

599 to_table = field.remote_field.model._meta.db_table 

600 to_column = field.remote_field.model._meta.get_field( 

601 field.remote_field.field_name 

602 ).column 

603 namespace, _ = split_identifier(model._meta.db_table) 

604 definition += " " + self.sql_create_column_inline_fk % { 

605 "name": self._fk_constraint_name(model, field, constraint_suffix), 

606 "namespace": f"{self.quote_name(namespace)}." if namespace else "", 

607 "column": self.quote_name(field.column), 

608 "to_table": self.quote_name(to_table), 

609 "to_column": self.quote_name(to_column), 

610 "deferrable": self.connection.ops.deferrable_sql(), 

611 } 

612 # Otherwise, add FK constraints later. 

613 else: 

614 self.deferred_sql.append( 

615 self._create_fk_sql(model, field, constraint_suffix) 

616 ) 

617 # Build the SQL and run it 

618 sql = self.sql_create_column % { 

619 "table": self.quote_name(model._meta.db_table), 

620 "column": self.quote_name(field.column), 

621 "definition": definition, 

622 } 

623 self.execute(sql, params) 

624 # Drop the default if we need to 

625 # (Plain usually does not use in-database defaults) 

626 if ( 

627 not self.skip_default_on_alter(field) 

628 and self.effective_default(field) is not None 

629 ): 

630 changes_sql, params = self._alter_column_default_sql( 

631 model, None, field, drop=True 

632 ) 

633 sql = self.sql_alter_column % { 

634 "table": self.quote_name(model._meta.db_table), 

635 "changes": changes_sql, 

636 } 

637 self.execute(sql, params) 

638 # Add field comment, if required. 

639 if ( 

640 field.db_comment 

641 and self.connection.features.supports_comments 

642 and not self.connection.features.supports_comments_inline 

643 ): 

644 field_type = db_params["type"] 

645 self.execute( 

646 *self._alter_column_comment_sql( 

647 model, field, field_type, field.db_comment 

648 ) 

649 ) 

650 # Add an index, if required 

651 self.deferred_sql.extend(self._field_indexes_sql(model, field)) 

652 # Reset connection if required 

653 if self.connection.features.connection_persists_old_columns: 

654 self.connection.close() 
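# Rough add_field() sequence as implemented above: build the definition
# with include_default=True, ADD COLUMN, then immediately DROP DEFAULT
# again (the default exists only to populate existing rows), then add any
# comment and deferred index. With the default templates, adding a NOT
# NULL column might render roughly as (names are placeholders):
#
#   ALTER TABLE app_table ADD COLUMN new_col varchar(10) DEFAULT %s NOT NULL
#   ALTER TABLE app_table ALTER COLUMN new_col DROP DEFAULT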

655 

656 def remove_field(self, model, field): 

657 """ 

658 Remove a field from a model. Usually involves deleting a column, 

659 but for M2Ms may involve deleting a table. 

660 """ 

661 # Special-case implicit M2M tables 

662 if field.many_to_many and field.remote_field.through._meta.auto_created: 

663 return self.delete_model(field.remote_field.through) 

664 # It might not actually have a column behind it 

665 if field.db_parameters(connection=self.connection)["type"] is None: 

666 return 

667 # Drop any FK constraints, MySQL requires explicit deletion 

668 if field.remote_field: 

669 fk_names = self._constraint_names(model, [field.column], foreign_key=True) 

670 for fk_name in fk_names: 

671 self.execute(self._delete_fk_sql(model, fk_name)) 

672 # Delete the column 

673 sql = self.sql_delete_column % { 

674 "table": self.quote_name(model._meta.db_table), 

675 "column": self.quote_name(field.column), 

676 } 

677 self.execute(sql) 

678 # Reset connection if required 

679 if self.connection.features.connection_persists_old_columns: 

680 self.connection.close() 

681 # Remove all deferred statements referencing the deleted column. 

682 for sql in list(self.deferred_sql): 

683 if isinstance(sql, Statement) and sql.references_column( 

684 model._meta.db_table, field.column 

685 ): 

686 self.deferred_sql.remove(sql) 

687 

688 def alter_field(self, model, old_field, new_field, strict=False): 

689 """ 

690 Allow a field's type, uniqueness, nullability, default, column, 

691 constraints, etc. to be modified. 

692 `old_field` is required to compute the necessary changes. 

693 If `strict` is True, raise errors if the old column does not match 

694 `old_field` precisely. 

695 """ 

696 if not self._field_should_be_altered(old_field, new_field): 

697 return 

698 # Ensure this field is even column-based 

699 old_db_params = old_field.db_parameters(connection=self.connection) 

700 old_type = old_db_params["type"] 

701 new_db_params = new_field.db_parameters(connection=self.connection) 

702 new_type = new_db_params["type"] 

703 if (old_type is None and old_field.remote_field is None) or ( 

704 new_type is None and new_field.remote_field is None 

705 ): 

706 raise ValueError( 

707 f"Cannot alter field {old_field} into {new_field} - they do not properly define " 

708 "db_type (are you using a badly-written custom field?)", 

709 ) 

710 elif ( 

711 old_type is None 

712 and new_type is None 

713 and ( 

714 old_field.remote_field.through 

715 and new_field.remote_field.through 

716 and old_field.remote_field.through._meta.auto_created 

717 and new_field.remote_field.through._meta.auto_created 

718 ) 

719 ): 

720 return self._alter_many_to_many(model, old_field, new_field, strict) 

721 elif ( 

722 old_type is None 

723 and new_type is None 

724 and ( 

725 old_field.remote_field.through 

726 and new_field.remote_field.through 

727 and not old_field.remote_field.through._meta.auto_created 

728 and not new_field.remote_field.through._meta.auto_created 

729 ) 

730 ): 

731 # Both sides have through models; this is a no-op. 

732 return 

733 elif old_type is None or new_type is None: 

734 raise ValueError( 

735 f"Cannot alter field {old_field} into {new_field} - they are not compatible types " 

736 "(you cannot alter to or from M2M fields, or add or remove " 

737 "through= on M2M fields)" 

738 ) 

739 

740 self._alter_field( 

741 model, 

742 old_field, 

743 new_field, 

744 old_type, 

745 new_type, 

746 old_db_params, 

747 new_db_params, 

748 strict, 

749 ) 

750 

751 def _field_db_check(self, field, field_db_params): 

752 # Always check constraints with the same mocked column name to avoid 

753 # recreating constraints when the column is renamed. 

754 check_constraints = self.connection.data_type_check_constraints 

755 data = field.db_type_parameters(self.connection) 

756 data["column"] = "__column_name__" 

757 try: 

758 return check_constraints[field.get_internal_type()] % data 

759 except KeyError: 

760 return None 

761 

762 def _alter_field( 

763 self, 

764 model, 

765 old_field, 

766 new_field, 

767 old_type, 

768 new_type, 

769 old_db_params, 

770 new_db_params, 

771 strict=False, 

772 ): 

773 """Perform a "physical" (non-ManyToMany) field update.""" 

774 # Drop any FK constraints, we'll remake them later 

775 fks_dropped = set() 

776 if ( 

777 self.connection.features.supports_foreign_keys 

778 and old_field.remote_field 

779 and old_field.db_constraint 

780 and self._field_should_be_altered( 

781 old_field, 

782 new_field, 

783 ignore={"db_comment"}, 

784 ) 

785 ): 

786 fk_names = self._constraint_names( 

787 model, [old_field.column], foreign_key=True 

788 ) 

789 if strict and len(fk_names) != 1: 

790 raise ValueError( 

791 f"Found wrong number ({len(fk_names)}) of foreign key constraints for {model._meta.db_table}.{old_field.column}" 

792 ) 

793 for fk_name in fk_names: 

794 fks_dropped.add((old_field.column,)) 

795 self.execute(self._delete_fk_sql(model, fk_name)) 

796 # Has unique been removed? 

797 if old_field.unique and ( 

798 not new_field.unique or self._field_became_primary_key(old_field, new_field) 

799 ): 

800 # Find the unique constraint for this field 

801 meta_constraint_names = { 

802 constraint.name for constraint in model._meta.constraints 

803 } 

804 constraint_names = self._constraint_names( 

805 model, 

806 [old_field.column], 

807 unique=True, 

808 primary_key=False, 

809 exclude=meta_constraint_names, 

810 ) 

811 if strict and len(constraint_names) != 1: 

812 raise ValueError( 

813 f"Found wrong number ({len(constraint_names)}) of unique constraints for {model._meta.db_table}.{old_field.column}" 

814 ) 

815 for constraint_name in constraint_names: 

816 self.execute(self._delete_unique_sql(model, constraint_name)) 

817 # Drop incoming FK constraints if the field is a primary key or unique, 

818 # which might be a to_field target, and things are going to change. 

819 old_collation = old_db_params.get("collation") 

820 new_collation = new_db_params.get("collation") 

821 drop_foreign_keys = ( 

822 self.connection.features.supports_foreign_keys 

823 and ( 

824 (old_field.primary_key and new_field.primary_key) 

825 or (old_field.unique and new_field.unique) 

826 ) 

827 and ((old_type != new_type) or (old_collation != new_collation)) 

828 ) 

829 if drop_foreign_keys: 

830 # The reverse relations gathered via _meta also contain M2M reverse fields; 

831 # these are filtered out by _related_non_m2m_objects(). 

832 for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field): 

833 rel_fk_names = self._constraint_names( 

834 new_rel.related_model, [new_rel.field.column], foreign_key=True 

835 ) 

836 for fk_name in rel_fk_names: 

837 self.execute(self._delete_fk_sql(new_rel.related_model, fk_name)) 

838 # Removed an index? (no strict check, as multiple indexes are possible) 

839 # Remove indexes if db_index switched to False or a unique constraint 

840 # will now be used in lieu of an index. The following lines from the 

841 # truth table show all True cases; the rest are False: 

842 # 

843 # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique 

844 # ------------------------------------------------------------------------------ 

845 # True | False | False | False 

846 # True | False | False | True 

847 # True | False | True | True 

848 if ( 

849 old_field.db_index 

850 and not old_field.unique 

851 and (not new_field.db_index or new_field.unique) 

852 ): 

853 # Find the index for this field 

854 meta_index_names = {index.name for index in model._meta.indexes} 

855 # Retrieve only BTREE indexes since this is what's created with 

856 # db_index=True. 

857 index_names = self._constraint_names( 

858 model, 

859 [old_field.column], 

860 index=True, 

861 type_=Index.suffix, 

862 exclude=meta_index_names, 

863 ) 

864 for index_name in index_names: 

865 # The only way to check if an index was created with 

866 # db_index=True or with Index(['field'], name='foo') 

867 # is to look at its name (refs #28053). 

868 self.execute(self._delete_index_sql(model, index_name)) 

869 # Change check constraints? 

870 old_db_check = self._field_db_check(old_field, old_db_params) 

871 new_db_check = self._field_db_check(new_field, new_db_params) 

872 if old_db_check != new_db_check and old_db_check: 

873 meta_constraint_names = { 

874 constraint.name for constraint in model._meta.constraints 

875 } 

876 constraint_names = self._constraint_names( 

877 model, 

878 [old_field.column], 

879 check=True, 

880 exclude=meta_constraint_names, 

881 ) 

882 if strict and len(constraint_names) != 1: 

883 raise ValueError( 

884 f"Found wrong number ({len(constraint_names)}) of check constraints for {model._meta.db_table}.{old_field.column}" 

885 ) 

886 for constraint_name in constraint_names: 

887 self.execute(self._delete_check_sql(model, constraint_name)) 

888 # Have they renamed the column? 

889 if old_field.column != new_field.column: 

890 self.execute( 

891 self._rename_field_sql( 

892 model._meta.db_table, old_field, new_field, new_type 

893 ) 

894 ) 

895 # Rename all references to the renamed column. 

896 for sql in self.deferred_sql: 

897 if isinstance(sql, Statement): 

898 sql.rename_column_references( 

899 model._meta.db_table, old_field.column, new_field.column 

900 ) 

901 # Next, start accumulating actions to do 

902 actions = [] 

903 null_actions = [] 

904 post_actions = [] 

905 # Type suffix change? (e.g. auto increment). 

906 old_type_suffix = old_field.db_type_suffix(connection=self.connection) 

907 new_type_suffix = new_field.db_type_suffix(connection=self.connection) 

908 # Type, collation, or comment change? 

909 if ( 

910 old_type != new_type 

911 or old_type_suffix != new_type_suffix 

912 or old_collation != new_collation 

913 or ( 

914 self.connection.features.supports_comments 

915 and old_field.db_comment != new_field.db_comment 

916 ) 

917 ): 

918 fragment, other_actions = self._alter_column_type_sql( 

919 model, old_field, new_field, new_type, old_collation, new_collation 

920 ) 

921 actions.append(fragment) 

922 post_actions.extend(other_actions) 

923 # When changing a column NULL constraint to NOT NULL with a given 

924 # default value, we need to perform 4 steps: 

925 # 1. Add a default for new incoming writes 

926 # 2. Update existing NULL rows with new default 

927 # 3. Replace NULL constraint with NOT NULL 

928 # 4. Drop the default again. 
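# Illustratively, with this class's default templates those four steps
# render roughly as (table, column, and default are placeholders):
#   ALTER TABLE t ALTER COLUMN c SET DEFAULT %s
#   UPDATE t SET c = %s WHERE c IS NULL
#   ALTER TABLE t ALTER COLUMN c SET NOT NULL
#   ALTER TABLE t ALTER COLUMN c DROP DEFAULT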

929 # Default change? 

930 needs_database_default = False 

931 if old_field.null and not new_field.null: 

932 old_default = self.effective_default(old_field) 

933 new_default = self.effective_default(new_field) 

934 if ( 

935 not self.skip_default_on_alter(new_field) 

936 and old_default != new_default 

937 and new_default is not None 

938 ): 

939 needs_database_default = True 

940 actions.append( 

941 self._alter_column_default_sql(model, old_field, new_field) 

942 ) 

943 # Nullability change? 

944 if old_field.null != new_field.null: 

945 fragment = self._alter_column_null_sql(model, old_field, new_field) 

946 if fragment: 

947 null_actions.append(fragment) 

948 # Only if we have a default and there is a change from NULL to NOT NULL 

949 four_way_default_alteration = new_field.has_default() and ( 

950 old_field.null and not new_field.null 

951 ) 

952 if actions or null_actions: 

953 if not four_way_default_alteration: 

954 # If we don't have to do a 4-way default alteration we can 

955 # directly run a (NOT) NULL alteration 

956 actions += null_actions 

957 # Combine actions together if we can (e.g. postgres) 

958 if self.connection.features.supports_combined_alters and actions: 

959 sql, params = tuple(zip(*actions)) 

960 actions = [(", ".join(sql), sum(params, []))] 

961 # Apply those actions 

962 for sql, params in actions: 

963 self.execute( 

964 self.sql_alter_column 

965 % { 

966 "table": self.quote_name(model._meta.db_table), 

967 "changes": sql, 

968 }, 

969 params, 

970 ) 

971 if four_way_default_alteration: 

972 # Update existing rows with default value 

973 self.execute( 

974 self.sql_update_with_default 

975 % { 

976 "table": self.quote_name(model._meta.db_table), 

977 "column": self.quote_name(new_field.column), 

978 "default": "%s", 

979 }, 

980 [new_default], 

981 ) 

982 # Since we didn't run a NOT NULL change before, we need to do it 

983 # now. 

984 for sql, params in null_actions: 

985 self.execute( 

986 self.sql_alter_column 

987 % { 

988 "table": self.quote_name(model._meta.db_table), 

989 "changes": sql, 

990 }, 

991 params, 

992 ) 

993 if post_actions: 

994 for sql, params in post_actions: 

995 self.execute(sql, params) 

996 # If primary_key changed to False, delete the primary key constraint. 

997 if old_field.primary_key and not new_field.primary_key: 

998 self._delete_primary_key(model, strict) 

999 # Added a unique? 

1000 if self._unique_should_be_added(old_field, new_field): 

1001 self.execute(self._create_unique_sql(model, [new_field])) 

1002 # Added an index? Add an index if db_index switched to True or a unique 

1003 # constraint will no longer be used in lieu of an index. The following 

1004 # lines from the truth table show all True cases; the rest are False: 

1005 # 

1006 # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique 

1007 # ------------------------------------------------------------------------------ 

1008 # False | False | True | False 

1009 # False | True | True | False 

1010 # True | True | True | False 

1011 if ( 

1012 (not old_field.db_index or old_field.unique) 

1013 and new_field.db_index 

1014 and not new_field.unique 

1015 ): 

1016 self.execute(self._create_index_sql(model, fields=[new_field])) 

1017 # Type alteration on primary key? Then we need to alter the column 

1018 # referring to us. 

1019 rels_to_update = [] 

1020 if drop_foreign_keys: 

1021 rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) 

1022 # Changed to become primary key? 

1023 if self._field_became_primary_key(old_field, new_field): 

1024 # Make the new one 

1025 self.execute(self._create_primary_key_sql(model, new_field)) 

1026 # Update all referencing columns 

1027 rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) 

1028 # Handle our type alters on the other end of rels from the PK stuff above 

1029 for old_rel, new_rel in rels_to_update: 

1030 rel_db_params = new_rel.field.db_parameters(connection=self.connection) 

1031 rel_type = rel_db_params["type"] 

1032 rel_collation = rel_db_params.get("collation") 

1033 old_rel_db_params = old_rel.field.db_parameters(connection=self.connection) 

1034 old_rel_collation = old_rel_db_params.get("collation") 

1035 fragment, other_actions = self._alter_column_type_sql( 

1036 new_rel.related_model, 

1037 old_rel.field, 

1038 new_rel.field, 

1039 rel_type, 

1040 old_rel_collation, 

1041 rel_collation, 

1042 ) 

1043 self.execute( 

1044 self.sql_alter_column 

1045 % { 

1046 "table": self.quote_name(new_rel.related_model._meta.db_table), 

1047 "changes": fragment[0], 

1048 }, 

1049 fragment[1], 

1050 ) 

1051 for sql, params in other_actions: 

1052 self.execute(sql, params) 

1053 # Does it have a foreign key? 

1054 if ( 

1055 self.connection.features.supports_foreign_keys 

1056 and new_field.remote_field 

1057 and ( 

1058 fks_dropped or not old_field.remote_field or not old_field.db_constraint 

1059 ) 

1060 and new_field.db_constraint 

1061 ): 

1062 self.execute( 

1063 self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s") 

1064 ) 

1065 # Rebuild FKs that pointed to us if we previously had to drop them 

1066 if drop_foreign_keys: 

1067 for _, rel in rels_to_update: 

1068 if rel.field.db_constraint: 

1069 self.execute( 

1070 self._create_fk_sql(rel.related_model, rel.field, "_fk") 

1071 ) 

1072 # Does it have check constraints we need to add? 

1073 if old_db_check != new_db_check and new_db_check: 

1074 constraint_name = self._create_index_name( 

1075 model._meta.db_table, [new_field.column], suffix="_check" 

1076 ) 

1077 self.execute( 

1078 self._create_check_sql(model, constraint_name, new_db_params["check"]) 

1079 ) 

1080 # Drop the default if we need to 

1081 # (Plain usually does not use in-database defaults) 

1082 if needs_database_default: 

1083 changes_sql, params = self._alter_column_default_sql( 

1084 model, old_field, new_field, drop=True 

1085 ) 

1086 sql = self.sql_alter_column % { 

1087 "table": self.quote_name(model._meta.db_table), 

1088 "changes": changes_sql, 

1089 } 

1090 self.execute(sql, params) 

1091 # Reset connection if required 

1092 if self.connection.features.connection_persists_old_columns: 

1093 self.connection.close() 
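# Summary of the ordering enforced by _alter_field() above (no new
# behavior, just the sequence the code follows): drop FKs, stale uniques,
# indexes and check constraints -> rename the column -> apply the
# type/default/null fragments via ALTER TABLE and run post_actions ->
# recreate primary key, unique, index, FK and check constraints ->
# finally drop any temporary database default.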

1094 

1095 def _alter_column_null_sql(self, model, old_field, new_field): 

1096 """ 

1097 Hook to specialize column null alteration. 

1098 

1099 Return a (sql, params) fragment to set a column to null or non-null 

1100 as required by new_field, or None if no changes are required. 

1101 """ 

1102 if ( 

1103 self.connection.features.interprets_empty_strings_as_nulls 

1104 and new_field.empty_strings_allowed 

1105 ): 

1106 # The field is nullable in the database anyway, leave it alone. 

1107 return 

1108 else: 

1109 new_db_params = new_field.db_parameters(connection=self.connection) 

1110 sql = ( 

1111 self.sql_alter_column_null 

1112 if new_field.null 

1113 else self.sql_alter_column_not_null 

1114 ) 

1115 return ( 

1116 sql 

1117 % { 

1118 "column": self.quote_name(new_field.column), 

1119 "type": new_db_params["type"], 

1120 }, 

1121 [], 

1122 ) 

1123 

1124 def _alter_column_default_sql(self, model, old_field, new_field, drop=False): 

1125 """ 

1126 Hook to specialize column default alteration. 

1127 

1128 Return a (sql, params) fragment to add or drop (depending on the drop 

1129 argument) a default to new_field's column. 

1130 """ 

1131 new_default = self.effective_default(new_field) 

1132 default = self._column_default_sql(new_field) 

1133 params = [new_default] 

1134 

1135 if drop: 

1136 params = [] 

1137 elif self.connection.features.requires_literal_defaults: 

1138 # Some databases (Oracle) can't take defaults as a parameter 

1139 # If this is the case, the SchemaEditor for that database should 

1140 # implement prepare_default(). 

1141 default = self.prepare_default(new_default) 

1142 params = [] 

1143 

1144 new_db_params = new_field.db_parameters(connection=self.connection) 

1145 if drop: 

1146 if new_field.null: 

1147 sql = self.sql_alter_column_no_default_null 

1148 else: 

1149 sql = self.sql_alter_column_no_default 

1150 else: 

1151 sql = self.sql_alter_column_default 

1152 return ( 

1153 sql 

1154 % { 

1155 "column": self.quote_name(new_field.column), 

1156 "type": new_db_params["type"], 

1157 "default": default, 

1158 }, 

1159 params, 

1160 ) 

1161 

1162 def _alter_column_type_sql( 

1163 self, model, old_field, new_field, new_type, old_collation, new_collation 

1164 ): 

1165 """ 

1166 Hook to specialize column type alteration for different backends, 

1167 for cases when a creation type is different from an alteration type 

1168 (e.g. SERIAL in PostgreSQL, PostGIS fields). 

1169 

1170 Return a two-tuple of: an SQL fragment of (sql, params) to insert into 

1171 an ALTER TABLE statement and a list of extra (sql, params) tuples to 

1172 run once the field is altered. 

1173 """ 

1174 other_actions = [] 

1175 if collate_sql := self._collate_sql( 

1176 new_collation, old_collation, model._meta.db_table 

1177 ): 

1178 collate_sql = f" {collate_sql}" 

1179 else: 

1180 collate_sql = "" 

1181 # Comment change? 

1182 comment_sql = "" 

1183 if self.connection.features.supports_comments and not new_field.many_to_many: 

1184 if old_field.db_comment != new_field.db_comment: 

1185 # PostgreSQL and Oracle can't execute 'ALTER COLUMN ...' and 

1186 # 'COMMENT ON ...' at the same time. 

1187 sql, params = self._alter_column_comment_sql( 

1188 model, new_field, new_type, new_field.db_comment 

1189 ) 

1190 if sql: 

1191 other_actions.append((sql, params)) 

1192 if new_field.db_comment: 

1193 comment_sql = self._comment_sql(new_field.db_comment) 

1194 return ( 

1195 ( 

1196 self.sql_alter_column_type 

1197 % { 

1198 "column": self.quote_name(new_field.column), 

1199 "type": new_type, 

1200 "collation": collate_sql, 

1201 "comment": comment_sql, 

1202 }, 

1203 [], 

1204 ), 

1205 other_actions, 

1206 ) 

1207 

1208 def _alter_column_comment_sql(self, model, new_field, new_type, new_db_comment): 

1209 return ( 

1210 self.sql_alter_column_comment 

1211 % { 

1212 "table": self.quote_name(model._meta.db_table), 

1213 "column": self.quote_name(new_field.column), 

1214 "comment": self._comment_sql(new_db_comment), 

1215 }, 

1216 [], 

1217 ) 

1218 

1219 def _comment_sql(self, comment): 

1220 return self.quote_value(comment or "") 

1221 

1222 def _alter_many_to_many(self, model, old_field, new_field, strict): 

1223 """Alter M2Ms to repoint their to= endpoints.""" 

1224 # Rename the through table 

1225 if ( 

1226 old_field.remote_field.through._meta.db_table 

1227 != new_field.remote_field.through._meta.db_table 

1228 ): 

1229 self.alter_db_table( 

1230 old_field.remote_field.through, 

1231 old_field.remote_field.through._meta.db_table, 

1232 new_field.remote_field.through._meta.db_table, 

1233 ) 

1234 # Repoint the FK to the other side 

1235 self.alter_field( 

1236 new_field.remote_field.through, 

1237 # The field that points to the target model is needed, so we can 

1238 # tell alter_field to change it - this is m2m_reverse_field_name() 

1239 # (as opposed to m2m_field_name(), which points to our model). 

1240 old_field.remote_field.through._meta.get_field( 

1241 old_field.m2m_reverse_field_name() 

1242 ), 

1243 new_field.remote_field.through._meta.get_field( 

1244 new_field.m2m_reverse_field_name() 

1245 ), 

1246 ) 

1247 self.alter_field( 

1248 new_field.remote_field.through, 

1249 # For self-referential models we need to alter the field from the other end too. 

1250 old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()), 

1251 new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()), 

1252 ) 

1253 

1254 def _create_index_name(self, table_name, column_names, suffix=""): 

1255 """ 

1256 Generate a unique name for an index/unique constraint. 

1257 

1258 The name is divided into 3 parts: the table name, the column names, 

1259 and a unique digest and suffix. 

1260 """ 

1261 _, table_name = split_identifier(table_name) 

1262 hash_suffix_part = ( 

1263 f"{names_digest(table_name, *column_names, length=8)}{suffix}" 

1264 ) 

1265 max_length = self.connection.ops.max_name_length() or 200 

1266 # If everything fits into max_length, use that name. 

1267 index_name = "{}_{}_{}".format( 

1268 table_name, "_".join(column_names), hash_suffix_part 

1269 ) 

1270 if len(index_name) <= max_length: 

1271 return index_name 

1272 # Shorten a long suffix. 

1273 if len(hash_suffix_part) > max_length / 3: 

1274 hash_suffix_part = hash_suffix_part[: max_length // 3] 

1275 other_length = (max_length - len(hash_suffix_part)) // 2 - 1 

1276 index_name = "{}_{}_{}".format( 

1277 table_name[:other_length], 

1278 "_".join(column_names)[:other_length], 

1279 hash_suffix_part, 

1280 ) 

1281 # Prepend D if needed to prevent the name from starting with an 

1282 # underscore or a number (not permitted on Oracle). 

1283 if index_name[0] == "_" or index_name[0].isdigit(): 

1284 index_name = f"D{index_name[:-1]}" 

1285 return index_name 
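# Name-shape sketch for _create_index_name() (the digest value here is
# made up):
#   _create_index_name("app_table", ["col_a", "col_b"], suffix="_idx")
#   -> "app_table_col_a_col_b_1a2b3c4d_idx"   (when it fits max_name_length)
# Longer names are truncated on the table/column parts and, if the result
# would start with "_" or a digit, prefixed with "D" (Oracle restriction
# noted above).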

1286 

1287 def _get_index_tablespace_sql(self, model, fields, db_tablespace=None): 

1288 if db_tablespace is None: 

1289 if len(fields) == 1 and fields[0].db_tablespace: 

1290 db_tablespace = fields[0].db_tablespace 

1291 elif settings.DEFAULT_INDEX_TABLESPACE: 

1292 db_tablespace = settings.DEFAULT_INDEX_TABLESPACE 

1293 elif model._meta.db_tablespace: 

1294 db_tablespace = model._meta.db_tablespace 

1295 if db_tablespace is not None: 

1296 return " " + self.connection.ops.tablespace_sql(db_tablespace) 

1297 return "" 

1298 

1299 def _index_condition_sql(self, condition): 

1300 if condition: 

1301 return " WHERE " + condition 

1302 return "" 

1303 

1304 def _index_include_sql(self, model, columns): 

1305 if not columns or not self.connection.features.supports_covering_indexes: 

1306 return "" 

1307 return Statement( 

1308 " INCLUDE (%(columns)s)", 

1309 columns=Columns(model._meta.db_table, columns, self.quote_name), 

1310 ) 

1311 

1312 def _create_index_sql( 

1313 self, 

1314 model, 

1315 *, 

1316 fields=None, 

1317 name=None, 

1318 suffix="", 

1319 using="", 

1320 db_tablespace=None, 

1321 col_suffixes=(), 

1322 sql=None, 

1323 opclasses=(), 

1324 condition=None, 

1325 include=None, 

1326 expressions=None, 

1327 ): 

1328 """ 

1329 Return the SQL statement to create the index for one or several fields 

1330 or expressions. `sql` can be specified if the syntax differs from the 

1331 standard (GIS indexes, ...). 

1332 """ 

1333 fields = fields or [] 

1334 expressions = expressions or [] 

1335 compiler = Query(model, alias_cols=False).get_compiler( 

1336 connection=self.connection, 

1337 ) 

1338 tablespace_sql = self._get_index_tablespace_sql( 

1339 model, fields, db_tablespace=db_tablespace 

1340 ) 

1341 columns = [field.column for field in fields] 

1342 sql_create_index = sql or self.sql_create_index 

1343 table = model._meta.db_table 

1344 

1345 def create_index_name(*args, **kwargs): 

1346 nonlocal name 

1347 if name is None: 

1348 name = self._create_index_name(*args, **kwargs) 

1349 return self.quote_name(name) 

1350 

1351 return Statement( 

1352 sql_create_index, 

1353 table=Table(table, self.quote_name), 

1354 name=IndexName(table, columns, suffix, create_index_name), 

1355 using=using, 

1356 columns=( 

1357 self._index_columns(table, columns, col_suffixes, opclasses) 

1358 if columns 

1359 else Expressions(table, expressions, compiler, self.quote_value) 

1360 ), 

1361 extra=tablespace_sql, 

1362 condition=self._index_condition_sql(condition), 

1363 include=self._index_include_sql(model, include), 

1364 ) 
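# The Statement returned above is deliberately lazy: its Table/Columns/
# IndexName parts keep references to table and column names, so deferred
# statements can later be rewritten via rename_table_references() /
# rename_column_references() (see alter_db_table() and _alter_field()).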

1365 

1366 def _delete_index_sql(self, model, name, sql=None): 

1367 return Statement( 

1368 sql or self.sql_delete_index, 

1369 table=Table(model._meta.db_table, self.quote_name), 

1370 name=self.quote_name(name), 

1371 ) 

1372 

1373 def _rename_index_sql(self, model, old_name, new_name): 

1374 return Statement( 

1375 self.sql_rename_index, 

1376 table=Table(model._meta.db_table, self.quote_name), 

1377 old_name=self.quote_name(old_name), 

1378 new_name=self.quote_name(new_name), 

1379 ) 

1380 

1381 def _index_columns(self, table, columns, col_suffixes, opclasses): 

1382 return Columns(table, columns, self.quote_name, col_suffixes=col_suffixes) 

1383 

1384 def _model_indexes_sql(self, model): 

1385 """ 

1386 Return a list of all index SQL statements (field indexes, Meta.indexes) for the specified model. 

1387 """ 

1388 if not model._meta.managed or model._meta.swapped: 

1389 return [] 

1390 output = [] 

1391 for field in model._meta.local_fields: 

1392 output.extend(self._field_indexes_sql(model, field)) 

1393 

1394 for index in model._meta.indexes: 

1395 if ( 

1396 not index.contains_expressions 

1397 or self.connection.features.supports_expression_indexes 

1398 ): 

1399 output.append(index.create_sql(model, self)) 

1400 return output 

1401 

1402 def _field_indexes_sql(self, model, field): 

1403 """ 

1404 Return a list of all index SQL statements for the specified field. 

1405 """ 

1406 output = [] 

1407 if self._field_should_be_indexed(model, field): 

1408 output.append(self._create_index_sql(model, fields=[field])) 

1409 return output 

1410 

1411 def _field_should_be_altered(self, old_field, new_field, ignore=None): 

1412 ignore = ignore or set() 

1413 _, old_path, old_args, old_kwargs = old_field.deconstruct() 

1414 _, new_path, new_args, new_kwargs = new_field.deconstruct() 

1415 # Don't alter when: 

1416 # - changing only a field name 

1417 # - changing an attribute that doesn't affect the schema 

1418 # - changing an attribute in the provided set of ignored attributes 

1419 # - adding only a db_column and the column name is not changed 

1420 for attr in ignore.union(old_field.non_db_attrs): 

1421 old_kwargs.pop(attr, None) 

1422 for attr in ignore.union(new_field.non_db_attrs): 

1423 new_kwargs.pop(attr, None) 

1424 return self.quote_name(old_field.column) != self.quote_name( 

1425 new_field.column 

1426 ) or (old_path, old_args, old_kwargs) != (new_path, new_args, new_kwargs) 
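# In practice (per the deconstruct()-based comparison above): two field
# definitions that differ only in attributes listed in non_db_attrs, or
# only in the Python field name while the database column stays the same,
# compare equal here and no ALTER is emitted.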

1427 

1428 def _field_should_be_indexed(self, model, field): 

1429 return field.db_index and not field.unique 

1430 

1431 def _field_became_primary_key(self, old_field, new_field): 

1432 return not old_field.primary_key and new_field.primary_key 

1433 

1434 def _unique_should_be_added(self, old_field, new_field): 

1435 return ( 

1436 not new_field.primary_key 

1437 and new_field.unique 

1438 and (not old_field.unique or old_field.primary_key) 

1439 ) 

1440 

1441 def _rename_field_sql(self, table, old_field, new_field, new_type): 

1442 return self.sql_rename_column % { 

1443 "table": self.quote_name(table), 

1444 "old_column": self.quote_name(old_field.column), 

1445 "new_column": self.quote_name(new_field.column), 

1446 "type": new_type, 

1447 } 

1448 

1449 def _create_fk_sql(self, model, field, suffix): 

1450 table = Table(model._meta.db_table, self.quote_name) 

1451 name = self._fk_constraint_name(model, field, suffix) 

1452 column = Columns(model._meta.db_table, [field.column], self.quote_name) 

1453 to_table = Table(field.target_field.model._meta.db_table, self.quote_name) 

1454 to_column = Columns( 

1455 field.target_field.model._meta.db_table, 

1456 [field.target_field.column], 

1457 self.quote_name, 

1458 ) 

1459 deferrable = self.connection.ops.deferrable_sql() 

1460 return Statement( 

1461 self.sql_create_fk, 

1462 table=table, 

1463 name=name, 

1464 column=column, 

1465 to_table=to_table, 

1466 to_column=to_column, 

1467 deferrable=deferrable, 

1468 ) 

1469 

1470 def _fk_constraint_name(self, model, field, suffix): 

1471 def create_fk_name(*args, **kwargs): 

1472 return self.quote_name(self._create_index_name(*args, **kwargs)) 

1473 

1474 return ForeignKeyName( 

1475 model._meta.db_table, 

1476 [field.column], 

1477 split_identifier(field.target_field.model._meta.db_table)[1], 

1478 [field.target_field.column], 

1479 suffix, 

1480 create_fk_name, 

1481 ) 

1482 

1483 def _delete_fk_sql(self, model, name): 

1484 return self._delete_constraint_sql(self.sql_delete_fk, model, name) 

1485 

1486 def _deferrable_constraint_sql(self, deferrable): 

1487 if deferrable is None: 

1488 return "" 

1489 if deferrable == Deferrable.DEFERRED: 

1490 return " DEFERRABLE INITIALLY DEFERRED" 

1491 if deferrable == Deferrable.IMMEDIATE: 

1492 return " DEFERRABLE INITIALLY IMMEDIATE" 

1493 

1494 def _unique_sql( 

1495 self, 

1496 model, 

1497 fields, 

1498 name, 

1499 condition=None, 

1500 deferrable=None, 

1501 include=None, 

1502 opclasses=None, 

1503 expressions=None, 

1504 ): 

1505 if ( 

1506 deferrable 

1507 and not self.connection.features.supports_deferrable_unique_constraints 

1508 ): 

1509 return None 

1510 if condition or include or opclasses or expressions: 

1511 # Databases support conditional, covering, and functional unique 

1512 # constraints via a unique index. 

1513 sql = self._create_unique_sql( 

1514 model, 

1515 fields, 

1516 name=name, 

1517 condition=condition, 

1518 include=include, 

1519 opclasses=opclasses, 

1520 expressions=expressions, 

1521 ) 

1522 if sql: 

1523 self.deferred_sql.append(sql) 

1524 return None 

1525 constraint = self.sql_unique_constraint % { 

1526 "columns": ", ".join([self.quote_name(field.column) for field in fields]), 

1527 "deferrable": self._deferrable_constraint_sql(deferrable), 

1528 } 

1529 return self.sql_constraint % { 

1530 "name": self.quote_name(name), 

1531 "constraint": constraint, 

1532 } 

1533 

1534 def _create_unique_sql( 

1535 self, 

1536 model, 

1537 fields, 

1538 name=None, 

1539 condition=None, 

1540 deferrable=None, 

1541 include=None, 

1542 opclasses=None, 

1543 expressions=None, 

1544 ): 

1545 if ( 

1546 ( 

1547 deferrable 

1548 and not self.connection.features.supports_deferrable_unique_constraints 

1549 ) 

1550 or (condition and not self.connection.features.supports_partial_indexes) 

1551 or (include and not self.connection.features.supports_covering_indexes) 

1552 or ( 

1553 expressions and not self.connection.features.supports_expression_indexes 

1554 ) 

1555 ): 

1556 return None 

1557 

1558 compiler = Query(model, alias_cols=False).get_compiler( 

1559 connection=self.connection 

1560 ) 

1561 table = model._meta.db_table 

1562 columns = [field.column for field in fields] 

1563 if name is None: 

1564 name = self._unique_constraint_name(table, columns, quote=True) 

1565 else: 

1566 name = self.quote_name(name) 

1567 if condition or include or opclasses or expressions: 

1568 sql = self.sql_create_unique_index 

1569 else: 

1570 sql = self.sql_create_unique 

1571 if columns: 

1572 columns = self._index_columns( 

1573 table, columns, col_suffixes=(), opclasses=opclasses 

1574 ) 

1575 else: 

1576 columns = Expressions(table, expressions, compiler, self.quote_value) 

1577 return Statement( 

1578 sql, 

1579 table=Table(table, self.quote_name), 

1580 name=name, 

1581 columns=columns, 

1582 condition=self._index_condition_sql(condition), 

1583 deferrable=self._deferrable_constraint_sql(deferrable), 

1584 include=self._index_include_sql(model, include), 

1585 ) 

1586 

1587 def _unique_constraint_name(self, table, columns, quote=True): 

1588 if quote: 

1589 

1590 def create_unique_name(*args, **kwargs): 

1591 return self.quote_name(self._create_index_name(*args, **kwargs)) 

1592 

1593 else: 

1594 create_unique_name = self._create_index_name 

1595 

1596 return IndexName(table, columns, "_uniq", create_unique_name) 

1597 

1598 def _delete_unique_sql( 

1599 self, 

1600 model, 

1601 name, 

1602 condition=None, 

1603 deferrable=None, 

1604 include=None, 

1605 opclasses=None, 

1606 expressions=None, 

1607 ): 

1608 if ( 

1609 ( 

1610 deferrable 

1611 and not self.connection.features.supports_deferrable_unique_constraints 

1612 ) 

1613 or (condition and not self.connection.features.supports_partial_indexes) 

1614 or (include and not self.connection.features.supports_covering_indexes) 

1615 or ( 

1616 expressions and not self.connection.features.supports_expression_indexes 

1617 ) 

1618 ): 

1619 return None 

1620 if condition or include or opclasses or expressions: 

1621 sql = self.sql_delete_index 

1622 else: 

1623 sql = self.sql_delete_unique 

1624 return self._delete_constraint_sql(sql, model, name) 

1625 

1626 def _check_sql(self, name, check): 

1627 return self.sql_constraint % { 

1628 "name": self.quote_name(name), 

1629 "constraint": self.sql_check_constraint % {"check": check}, 

1630 } 

1631 

1632 def _create_check_sql(self, model, name, check): 

1633 if not self.connection.features.supports_table_check_constraints: 

1634 return None 

1635 return Statement( 

1636 self.sql_create_check, 

1637 table=Table(model._meta.db_table, self.quote_name), 

1638 name=self.quote_name(name), 

1639 check=check, 

1640 ) 

1641 

1642 def _delete_check_sql(self, model, name): 

1643 if not self.connection.features.supports_table_check_constraints: 

1644 return None 

1645 return self._delete_constraint_sql(self.sql_delete_check, model, name) 

1646 

1647 def _delete_constraint_sql(self, template, model, name): 

1648 return Statement( 

1649 template, 

1650 table=Table(model._meta.db_table, self.quote_name), 

1651 name=self.quote_name(name), 

1652 ) 

1653 

1654 def _constraint_names( 

1655 self, 

1656 model, 

1657 column_names=None, 

1658 unique=None, 

1659 primary_key=None, 

1660 index=None, 

1661 foreign_key=None, 

1662 check=None, 

1663 type_=None, 

1664 exclude=None, 

1665 ): 

1666 """Return all constraint names matching the columns and conditions.""" 

1667 if column_names is not None: 

1668 column_names = [ 

1669 self.connection.introspection.identifier_converter( 

1670 truncate_name(name, self.connection.ops.max_name_length()) 

1671 ) 

1672 if self.connection.features.truncates_names 

1673 else self.connection.introspection.identifier_converter(name) 

1674 for name in column_names 

1675 ] 

1676 with self.connection.cursor() as cursor: 

1677 constraints = self.connection.introspection.get_constraints( 

1678 cursor, model._meta.db_table 

1679 ) 

1680 result = [] 

1681 for name, infodict in constraints.items(): 

1682 if column_names is None or column_names == infodict["columns"]: 

1683 if unique is not None and infodict["unique"] != unique: 

1684 continue 

1685 if primary_key is not None and infodict["primary_key"] != primary_key: 

1686 continue 

1687 if index is not None and infodict["index"] != index: 

1688 continue 

1689 if check is not None and infodict["check"] != check: 

1690 continue 

1691 if foreign_key is not None and not infodict["foreign_key"]: 

1692 continue 

1693 if type_ is not None and infodict["type"] != type_: 

1694 continue 

1695 if not exclude or name not in exclude: 

1696 result.append(name) 

1697 return result 

1698 

1699 def _delete_primary_key(self, model, strict=False): 

1700 constraint_names = self._constraint_names(model, primary_key=True) 

1701 if strict and len(constraint_names) != 1: 

1702 raise ValueError( 

1703 f"Found wrong number ({len(constraint_names)}) of PK constraints for {model._meta.db_table}" 

1704 ) 

1705 for constraint_name in constraint_names: 

1706 self.execute(self._delete_primary_key_sql(model, constraint_name)) 

1707 

1708 def _create_primary_key_sql(self, model, field): 

1709 return Statement( 

1710 self.sql_create_pk, 

1711 table=Table(model._meta.db_table, self.quote_name), 

1712 name=self.quote_name( 

1713 self._create_index_name( 

1714 model._meta.db_table, [field.column], suffix="_pk" 

1715 ) 

1716 ), 

1717 columns=Columns(model._meta.db_table, [field.column], self.quote_name), 

1718 ) 

1719 

1720 def _delete_primary_key_sql(self, model, name): 

1721 return self._delete_constraint_sql(self.sql_delete_pk, model, name) 

1722 

1723 def _collate_sql(self, collation, old_collation=None, table_name=None): 

1724 return "COLLATE " + self.quote_name(collation) if collation else "" 

1725 

1726 def remove_procedure(self, procedure_name, param_types=()): 

1727 sql = self.sql_delete_procedure % { 

1728 "procedure": self.quote_name(procedure_name), 

1729 "param_types": ",".join(param_types), 

1730 } 

1731 self.execute(sql)