forked from yesodweb/persistent
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathMigration.hs
More file actions
1183 lines (1112 loc) · 44.1 KB
/
Migration.hs
File metadata and controls
1183 lines (1112 loc) · 44.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
-- | Generate postgresql migrations for a set of EntityDefs, either from scratch
-- or based on the current state of a database.
module Database.Persist.Postgresql.Internal.Migration where
import Control.Arrow
import Control.Monad
import Control.Monad.Except
import Control.Monad.IO.Class
import Data.Acquire (with)
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.Either (partitionEithers)
import Data.FileEmbed (embedFileRelative)
import Data.List as List
import qualified Data.List.NonEmpty as NEL
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Traversable
import Database.Persist.Sql
import qualified Database.Persist.Sql.Util as Util
-- | Compute the structured list of DB changes needed to bring a single
-- entity from its current state in the database to the state described in
-- Haskell. Delegates to 'migrateEntitiesStructured' with a singleton list.
--
-- @since 2.17.1.0
migrateStructured
    :: [EntityDef]
    -> (Text -> IO Statement)
    -> EntityDef
    -> IO (Either [Text] [AlterDB])
migrateStructured allDefs getter entity =
    migrateEntitiesStructured getter allDefs (pure entity)
-- | Compute the structured list of DB changes needed to bring all of the
-- listed entities from their current database state to the state described in
-- Haskell. The schema for every listed entity is collected up front in a
-- fixed number of queries, so this is much faster than calling
-- 'migrateStructured' once per entity.
--
-- @since 2.14.1.0
migrateEntitiesStructured
    :: (Text -> IO Statement)
    -> [EntityDef]
    -> [EntityDef]
    -> IO (Either [Text] [AlterDB])
migrateEntitiesStructured getStmt allDefs defsToMigrate = do
    schemaResult <- collectSchemaState getStmt (map getEntityDBName defsToMigrate)
    pure $
        either
            (\err -> Left [err])
            (\schemaState -> migrateEntitiesFromSchemaState schemaState allDefs defsToMigrate)
            schemaResult
-- | Compute the structured list of DB changes needed to create the entity
-- from scratch, i.e. assuming it does not currently exist in the database.
-- No queries are issued.
--
-- @since 2.17.1.0
mockMigrateStructured
    :: [EntityDef]
    -> EntityDef
    -> [AlterDB]
mockMigrateStructured allDefs =
    migrateEntityFromSchemaState EntityDoesNotExist allDefs
-- | In order to ensure that generating migrations is fast and avoids N+1
-- queries, we split it into two phases. The first phase involves querying the
-- database to gather all of the information we need about the existing schema.
-- The second phase then generates migrations based on the information from the
-- first phase. This data type represents all of the data that's gathered during
-- the first phase: information about the current state of the entities we're
-- migrating in the database.
--
-- The map is keyed by table (DB) name; 'collectSchemaState' puts an entry in
-- it for every entity name it was asked about.
newtype SchemaState = SchemaState (Map EntityNameDB EntitySchemaState)
    deriving (Eq, Show)
-- | The state of a particular entity (i.e. table) in the database; we generate
-- migrations based on the diff of this versus an EntityDef.
data EntitySchemaState
    = -- | The table does not exist in the database
      EntityDoesNotExist
    | -- | The table does exist in the database
      EntityExists ExistingEntitySchemaState
    deriving (Eq, Show)
-- | Information about an existing table in the database
data ExistingEntitySchemaState = ExistingEntitySchemaState
    { essColumns :: Map FieldNameDB (Column, (Set ColumnReference))
    -- ^ The columns in this entity, together with the set of foreign key
    -- constraints that they are subject to. Usually the ColumnReference list
    -- will contain 0-1 elements, but in the event that there are multiple FK
    -- constraints applying to a given column in the database we need to keep
    -- track of them all because we don't yet know which one has the right name
    -- (based on what is in the corresponding model's EntityDef).
    --
    -- Note that cReference will be unset for these columns, for the same reason:
    -- there may be multiple FK constraints and we don't yet know which one to
    -- use.
    , essUniqueConstraints :: Map ConstraintNameDB [FieldNameDB]
    -- ^ A map of unique constraint names to the columns that are affected by
    -- those constraints.
    }
    deriving (Eq, Show)
-- | Query a database in order to assemble a SchemaState containing information
-- about each of the entities in the given list. Every entity name in the input
-- should be present in the returned Map.
collectSchemaState
    :: (Text -> IO Statement) -> [EntityNameDB] -> IO (Either Text SchemaState)
collectSchemaState getStmt entityNames = runExceptT $ do
    -- Four batched queries up front (one per kind of schema information),
    -- independent of the number of entities -- this is what avoids N+1.
    existence <- getTableExistence getStmt entityNames
    columns <- getColumnsWithoutReferences getStmt entityNames
    constraints <- getConstraints getStmt entityNames
    foreignKeyReferences <- getForeignKeyReferences getStmt entityNames
    fmap (SchemaState . Map.fromList) $
        for entityNames $ \entityNameDB -> do
            -- 'getTableExistence' promises a key for every requested name, so a
            -- miss here indicates an internal inconsistency, not a user error.
            tableExists <- case Map.lookup entityNameDB existence of
                Just e -> pure e
                Nothing ->
                    throwError
                        ("Missing entity name from existence map: " <> unEntityNameDB entityNameDB)
            if tableExists
                then do
                    essColumns <- case Map.lookup entityNameDB columns of
                        Just cols ->
                            -- Pair each column with whatever FK constraints the DB
                            -- reports for it (possibly none).
                            pure $ Map.fromList $ flip map cols $ \c ->
                                ( cName c
                                , ( c
                                  , fromMaybe Set.empty $
                                        Map.lookup (cName c) =<< Map.lookup entityNameDB foreignKeyReferences
                                  )
                                )
                        Nothing ->
                            throwError
                                ("Missing entity name from columns map: " <> unEntityNameDB entityNameDB)
                    let
                        essUniqueConstraints = fromMaybe Map.empty (Map.lookup entityNameDB constraints)
                    pure
                        ( entityNameDB
                        , EntityExists $ ExistingEntitySchemaState{essColumns, essUniqueConstraints}
                        )
                else
                    pure
                        ( entityNameDB
                        , EntityDoesNotExist
                        )
-- | Prepare @sql@ through the supplied statement getter, execute it with the
-- bound parameter @values@, and strictly collect every result row mapped
-- through @process@.
--
-- NOTE(review): the @Show a@ constraint is not used by this body; it is kept
-- only to preserve the existing signature for callers.
runStmt
    :: (Show a)
    => (Text -> IO Statement)
    -> Text
    -> [PersistValue]
    -> ([PersistValue] -> a)
    -> IO [a]
runStmt getStmt sql values process = do
    stmt <- getStmt sql
    -- 'with' scopes the Acquire, releasing the query's resources even if
    -- consuming the conduit throws. Returning the consumed list directly
    -- avoids the redundant bind-then-pure of the previous version.
    with
        (stmtQuery stmt values)
        (\src -> runConduit $ src .| CL.map process .| CL.consume)
-- | Check which of the input tables exist in the database. The returned map
-- has exactly one key per requested entity name; 'True' means the table was
-- found in @pg_catalog.pg_tables@.
getTableExistence
    :: (Text -> IO Statement)
    -> [EntityNameDB]
    -> ExceptT Text IO (Map EntityNameDB Bool)
getTableExistence getStmt entityNames = do
    rows <-
        liftIO $
            runStmt
                getStmt
                getTableExistenceSql
                [PersistArray (map (PersistText . unEntityNameDB) entityNames)]
                processTable
    case partitionEithers rows of
        ([], found) ->
            let
                existingSet = Set.fromList found
            in
                -- Mark every requested name present/absent so callers never
                -- have to handle a missing key.
                pure $ Map.fromList [(n, Set.member n existingSet) | n <- entityNames]
        (errs, _) -> throwError (T.intercalate "\n" errs)
  where
    getTableExistenceSql =
        "SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog'"
            <> " AND schemaname != 'information_schema' AND tablename=ANY (?)"
    -- A result row is a single column holding the table name, delivered as
    -- either text or bytes depending on the driver.
    processTable :: [PersistValue] -> Either Text EntityNameDB
    processTable row =
        case row of
            [PersistText tableName] ->
                Right (EntityNameDB tableName)
            [PersistByteString tableName] ->
                Right (EntityNameDB (T.decodeUtf8 tableName))
            other ->
                Left $ T.pack $ "Invalid result from information_schema: " ++ show other
-- | Get all columns for the listed tables from the database, ignoring foreign
-- key references (those are filled in later).
getColumnsWithoutReferences
    :: (Text -> IO Statement)
    -> [EntityNameDB]
    -> ExceptT Text IO (Map EntityNameDB [Column])
getColumnsWithoutReferences getStmt entityNames = do
    results <-
        liftIO $
            runStmt
                getStmt
                getColumnsSql
                [PersistArray (map (PersistText . unEntityNameDB) entityNames)]
                processColumn
    case partitionEithers results of
        -- Group the (table, column) pairs into per-table column lists.
        ([], xs) -> pure $ Map.fromListWith (++) $ map (second (: [])) xs
        (errs, _) -> throwError (T.intercalate "\n" errs)
  where
    -- The select-list order here must match the pattern in 'processColumn'.
    getColumnsSql =
        T.concat
            [ "SELECT "
            , "table_name "
            , ",column_name "
            , ",is_nullable "
            , ",COALESCE(domain_name, udt_name)" -- See DOMAINS below
            , ",column_default "
            , ",generation_expression "
            , ",numeric_precision "
            , ",numeric_scale "
            , ",character_maximum_length "
            , "FROM information_schema.columns "
            , "WHERE table_catalog=current_database() "
            , "AND table_schema=current_schema() "
            , "AND table_name=ANY (?) "
            ]
    -- DOMAINS Postgres supports the concept of domains, which are data types
    -- with optional constraints. An app might make an "email" domain over the
    -- varchar type, with a CHECK that the emails are valid In this case the
    -- generated SQL should use the domain name: ALTER TABLE users ALTER COLUMN
    -- foo TYPE email This code exists to use the domain name (email), instead
    -- of the underlying type (varchar). This is tested in
    -- EquivalentTypeTest.hs
    processColumn :: [PersistValue] -> Either Text (EntityNameDB, Column)
    processColumn resultRow = do
        case resultRow of
            [ PersistText tableName
                , PersistText columnName
                , PersistText isNullable
                , PersistText typeName
                , defaultValue
                , generationExpression
                , numericPrecision
                , numericScale
                , maxlen
                ] -> mapLeft (addErrorContext tableName columnName) $ do
                    defaultValue' <-
                        case defaultValue of
                            PersistNull ->
                                pure Nothing
                            PersistText t ->
                                pure $ Just t
                            _ ->
                                throwError $ T.pack $ "Invalid default column: " ++ show defaultValue
                    generationExpression' <-
                        case generationExpression of
                            PersistNull ->
                                pure Nothing
                            PersistText t ->
                                pure $ Just t
                            _ ->
                                throwError $ T.pack $ "Invalid generated column: " ++ show generationExpression
                    let
                        -- Append the max length to the type name (e.g.
                        -- "varchar(255)") when one is reported.
                        typeStr =
                            case maxlen of
                                PersistInt64 n ->
                                    T.concat [typeName, "(", T.pack (show n), ")"]
                                _ ->
                                    typeName
                    t <- getType numericPrecision numericScale typeStr
                    pure
                        ( EntityNameDB tableName
                        , Column
                            { cName = FieldNameDB columnName
                            , cNull = isNullable == "YES"
                            , cSqlType = t
                            , cDefault = fmap stripSuffixes defaultValue'
                            , cGenerated = fmap stripSuffixes generationExpression'
                            , cDefaultConstraintName = Nothing
                            , cMaxLen = Nothing
                            , -- References are deliberately left unset here; see
                              -- 'ExistingEntitySchemaState'.
                              cReference = Nothing
                            }
                        )
            other ->
                Left $
                    T.pack $
                        "Invalid result from information_schema: " ++ show other
    -- Strip the cast suffixes Postgres appends to default/generation
    -- expressions (e.g. @'x'::text@), so they compare equal to the values
    -- written in the models file. First matching suffix wins.
    stripSuffixes t =
        loop'
            [ "::character varying"
            , "::text"
            ]
      where
        loop' [] = t
        loop' (p : ps) =
            case T.stripSuffix p t of
                Nothing -> loop' ps
                Just t' -> t'
    -- Map a Postgres type name (plus numeric precision/scale) to a SqlType.
    -- Unrecognised names fall through to 'SqlOther'.
    getType _ _ "int4" = pure SqlInt32
    getType _ _ "int8" = pure SqlInt64
    getType _ _ "varchar" = pure SqlString
    getType _ _ "text" = pure SqlString
    getType _ _ "date" = pure SqlDay
    getType _ _ "bool" = pure SqlBool
    getType _ _ "timestamptz" = pure SqlDayTime
    getType _ _ "float4" = pure SqlReal
    getType _ _ "float8" = pure SqlReal
    getType _ _ "bytea" = pure SqlBlob
    getType _ _ "time" = pure SqlTime
    getType precision scale "numeric" = getNumeric precision scale
    getType _ _ a = pure $ SqlOther a
    getNumeric (PersistInt64 a) (PersistInt64 b) =
        pure $ SqlNumeric (fromIntegral a) (fromIntegral b)
    getNumeric PersistNull PersistNull =
        throwError $
            T.concat
                [ "No precision and scale were specified. "
                , "Postgres defaults to a maximum scale of 147,455 and precision of 16383,"
                , " which is probably not what you intended."
                , " Specify the values as numeric(total_digits, digits_after_decimal_place)."
                ]
    getNumeric a b =
        throwError $
            T.concat
                [ "Can not get numeric field precision. "
                , "Expected an integer for both precision and scale, "
                , "got: "
                , T.pack $ show a
                , " and "
                , T.pack $ show b
                , ", respectively."
                , " Specify the values as numeric(total_digits, digits_after_decimal_place)."
                ]
    -- Prefix an error message with the table and column it refers to.
    addErrorContext :: Text -> Text -> Text -> Text
    addErrorContext tableName columnName originalMsg =
        T.concat
            [ "Error in column "
            , tableName
            , "."
            , columnName
            , ": "
            , originalMsg
            ]
-- | Get all constraints for the listed tables from the database, except for foreign
-- keys and primary keys (those go in the Column data type)
getConstraints
    :: (Text -> IO Statement)
    -> [EntityNameDB]
    -> ExceptT Text IO (Map EntityNameDB (Map ConstraintNameDB [FieldNameDB]))
getConstraints getStmt entityNames = do
    results <-
        liftIO $
            runStmt
                getStmt
                getConstraintsSql
                [PersistArray (map (PersistText . unEntityNameDB) entityNames)]
                processConstraint
    case partitionEithers results of
        -- Each row yields a singleton map; merging concatenates the column
        -- lists of a shared constraint name. The ORDER BY in the SQL keeps
        -- a constraint's columns in a stable order.
        ([], xs) -> pure $ Map.unionsWith (Map.unionWith (<>)) xs
        (errs, _) -> throwError (T.intercalate "\n" errs)
  where
    getConstraintsSql =
        T.concat
            [ "SELECT "
            , "c.table_name, "
            , "c.constraint_name, "
            , "c.column_name "
            , "FROM information_schema.key_column_usage AS c, "
            , "information_schema.table_constraints AS k "
            , "WHERE c.table_catalog=current_database() "
            , "AND c.table_catalog=k.table_catalog "
            , "AND c.table_schema=current_schema() "
            , "AND c.table_schema=k.table_schema "
            , "AND c.table_name=ANY (?) "
            , "AND c.table_name=k.table_name "
            , "AND c.constraint_name=k.constraint_name "
            , "AND NOT k.constraint_type IN ('PRIMARY KEY', 'FOREIGN KEY') "
            , "ORDER BY c.constraint_name, c.column_name"
            ]
    -- A row is (table, constraint, column), as text or bytes depending on
    -- the driver.
    processConstraint
        :: [PersistValue]
        -> Either Text (Map EntityNameDB (Map ConstraintNameDB [FieldNameDB]))
    processConstraint resultRow = do
        (tableName, constraintName, columnName) <- case resultRow of
            [PersistText tab, PersistText con, PersistText col] ->
                pure (tab, con, col)
            [PersistByteString tab, PersistByteString con, PersistByteString col] ->
                pure (T.decodeUtf8 tab, T.decodeUtf8 con, T.decodeUtf8 col)
            o ->
                throwError $ T.pack $ "unexpected datatype returned for postgres o=" ++ show o
        pure $
            Map.singleton
                (EntityNameDB tableName)
                (Map.singleton (ConstraintNameDB constraintName) [FieldNameDB columnName])
-- | Get foreign key constraint information for all columns in the supplied
-- tables from the database. We return a list of references per column because
-- there may be duplicate FK constraints in the database.
--
-- Note that we only care about FKs where the column in question has ordinal
-- position 1 i.e. is the first column appearing in the FK constraint.
-- Eventually we may want to fill this gap so that multi-column FK constraints
-- can be dealt with by this migrator, but for now that is not something that
-- persistent-postgresql handles.
getForeignKeyReferences
    :: (Text -> IO Statement)
    -> [EntityNameDB]
    -> ExceptT Text IO (Map EntityNameDB (Map FieldNameDB (Set ColumnReference)))
getForeignKeyReferences getStmt entityNames = do
    results <-
        liftIO $
            runStmt
                getStmt
                getForeignKeyReferencesSql
                [PersistArray (map (PersistText . unEntityNameDB) entityNames)]
                processForeignKeyReference
    case partitionEithers results of
        -- Merge the per-row singleton maps, unioning the reference sets for a
        -- given (table, column).
        ([], xs) -> pure $ Map.unionsWith (Map.unionWith Set.union) xs
        (errs, _) -> throwError (T.intercalate "\n" errs)
  where
    -- The query is embedded at compile time from sql/getForeignKeyReferences.sql;
    -- its select-list order must match the row pattern below.
    getForeignKeyReferencesSql = T.decodeUtf8 $(embedFileRelative "sql/getForeignKeyReferences.sql")
    processForeignKeyReference
        :: [PersistValue]
        -> Either Text (Map EntityNameDB (Map FieldNameDB (Set ColumnReference)))
    processForeignKeyReference resultRow = do
        ( sourceTableName
            , sourceColumnName
            , refTableName
            , constraintName
            , updRule
            , delRule
            ) <-
            case resultRow of
                [ PersistText constrName
                    , PersistText srcTable
                    , PersistText refTable
                    , PersistText srcColumn
                    , PersistText _refColumn
                    , PersistText updRule
                    , PersistText delRule
                    ] ->
                        pure
                            ( EntityNameDB srcTable
                            , FieldNameDB srcColumn
                            , EntityNameDB refTable
                            , ConstraintNameDB constrName
                            , updRule
                            , delRule
                            )
                other ->
                    throwError $ T.pack $ "unexpected row returned for postgres: " ++ show other
        fcOnUpdate <- parseCascade updRule
        fcOnDelete <- parseCascade delRule
        let
            columnRef =
                ColumnReference
                    { crTableName = refTableName
                    , crConstraintName = constraintName
                    , crFieldCascade =
                        FieldCascade
                            { fcOnUpdate = Just fcOnUpdate
                            , fcOnDelete = Just fcOnDelete
                            }
                    }
        pure $
            Map.singleton
                sourceTableName
                (Map.singleton sourceColumnName (Set.singleton columnRef))
    -- Parse a cascade action as represented in pg_constraint
    -- (single-letter codes from pg_constraint.confupdtype / confdeltype).
    parseCascade :: Text -> Either Text CascadeAction
    parseCascade txt =
        case txt of
            "a" ->
                Right NoAction
            "c" ->
                Right Cascade
            "n" ->
                Right SetNull
            "d" ->
                Right SetDefault
            "r" ->
                Right Restrict
            _ ->
                Left $ "Unexpected value in parseCascade: " <> txt
-- | Apply a function to the 'Left' value of an 'Either', leaving 'Right'
-- values untouched (a local equivalent of @Data.Bifunctor.first@).
mapLeft :: (a1 -> a2) -> Either a1 b -> Either a2 b
mapLeft f = either (Left . f) Right
-- | Diff each entity to migrate against the collected 'SchemaState' and
-- concatenate the resulting operations. Fails (collecting every error) if an
-- entity is missing from the schema state.
migrateEntitiesFromSchemaState
    :: SchemaState
    -> [EntityDef]
    -> [EntityDef]
    -> Either [Text] [AlterDB]
migrateEntitiesFromSchemaState (SchemaState schemaStateMap) allDefs defsToMigrate =
    case partitionEithers (map migrateOne defsToMigrate) of
        ([], alterss) -> Right (concat alterss)
        (errs, _) -> Left errs
  where
    migrateOne :: EntityDef -> Either Text [AlterDB]
    migrateOne entity =
        let
            name = getEntityDBName entity
        in
            case Map.lookup name schemaStateMap of
                Just entityState ->
                    Right $ migrateEntityFromSchemaState entityState allDefs entity
                Nothing ->
                    Left $ T.pack $ "No entry for entity in schemaState: " <> show name
-- | Generate the migration operations for one entity, given its current state
-- in the database: a CREATE TABLE plus constraints when it doesn't exist, or
-- a diff of columns and unique constraints (via 'getAlters') when it does.
migrateEntityFromSchemaState
    :: EntitySchemaState
    -> [EntityDef]
    -> EntityDef
    -> [AlterDB]
migrateEntityFromSchemaState schemaState allDefs entity =
    case schemaState of
        EntityDoesNotExist ->
            (addTable newcols entity) : uniques ++ references ++ foreignsAlt
        EntityExists ExistingEntitySchemaState{essColumns, essUniqueConstraints} ->
            let
                (acs, ats) =
                    getAlters
                        allDefs
                        entity
                        (newcols, udspair)
                        ( map pickColumnReference (Map.elems essColumns)
                        , Map.toList essUniqueConstraints
                        )
                acs' = map (AlterColumn name) acs
                ats' = map (AlterTable name) ats
            in
                acs' ++ ats'
  where
    name = getEntityDBName entity
    (newcols', udefs, fdefs) = postgresMkColumns allDefs entity
    -- Columns flagged safe-to-remove are excluded from the desired state so
    -- they diff as drops.
    newcols = filter (not . safeToRemove entity . cName) newcols'
    udspair = map udToPair udefs
    uniques = flip concatMap udspair $ \(uname, ucols) ->
        [AlterTable name $ AddUniqueConstraint uname ucols]
    references =
        mapMaybe
            ( \Column{cName, cReference} ->
                getAddReference allDefs entity cName =<< cReference
            )
            newcols
    foreignsAlt = mapMaybe (mkForeignAlt entity) fdefs
    -- HACK! This was added to preserve existing behaviour during a refactor.
    -- The migrator currently expects to only see cReference set in the old
    -- columns if it is also set in the new ones. It also ignores any existing
    -- FK constraints in the database that don't match the expected FK
    -- constraint name as defined by the Persistent EntityDef.
    --
    -- This means that the migrator sometimes behaves incorrectly for standalone
    -- Foreign declarations, like Child in the ForeignKey test in
    -- persistent-test, as well as in situations where there are duplicate FK
    -- constraints for a given column.
    --
    -- See https://github.com/yesodweb/persistent/issues/1611#issuecomment-3613251095 for
    -- more info
    pickColumnReference (oldCol, oldReferences) =
        case List.find (\c -> cName c == cName oldCol) newcols of
            Just new -> fromMaybe oldCol $ do
                -- Note that if this do block evaluates to Nothing, it means
                -- we'll return a Column that has cReference = Nothing -
                -- effectively, we are telling the migrator that this particular
                -- column has no FK constraints in the DB.
                -- If the persistent models don't define a FK constraint, ignore
                -- any FK constraints that might exist in the DB (this is
                -- arguably a bug, but it's a pre-existing one)
                newRef <- cReference new
                -- If the persistent models _do_ define an FK constraint but
                -- there's no matching FK constraint in the DB, we don't have
                -- to do anything else here: `getAlters` should handle adding
                -- the FK constraint for us
                oldRef <-
                    List.find
                        (\oldRef -> crConstraintName oldRef == crConstraintName newRef)
                        oldReferences
                -- Finally, if the persistent models define an FK constraint and
                -- an FK constraint of that name exists in the DB, return it, so
                -- that `getAlters` can check that the constraint is set up
                -- correctly
                pure $ oldCol{cReference = Just oldRef}
            Nothing ->
                -- We have a column that exists in the DB but not in the
                -- EntityDef. We can no-op here, since `getAlters` will handle
                -- dropping this for us.
                oldCol
-- | Indicates whether a Postgres Column is safe to drop.
--
-- @since 2.17.1.0
newtype SafeToRemove = SafeToRemove Bool
    deriving (Show, Eq)
-- | Represents a change to a Postgres column in a DB statement.
--
-- @since 2.17.1.0
data AlterColumn
    = -- | Change the column's SQL type; the trailing 'Text' is extra SQL
      -- appended verbatim after the new type (see 'showAlter').
      ChangeType Column SqlType Text
    | -- | Drop the column's @NOT NULL@ constraint.
      IsNull Column
    | -- | Add a @NOT NULL@ constraint to the column.
      NotNull Column
    | -- | Add a new column.
      AddColumn Column
    | -- | Drop the column; the flag records whether dropping it was marked
      -- safe, which determines whether the statement is reported as unsafe.
      Drop Column SafeToRemove
    | -- | Set the column's default to the given expression.
      Default Column Text
    | -- | Remove the column's default.
      NoDefault Column
    | -- | Fill existing NULLs with the given value, presumably so a
      -- @NOT NULL@ constraint can then be applied -- TODO confirm (the
      -- rendering for this constructor is outside this view).
      UpdateNullToValue Column Text
    | -- | Add a foreign key constraint: referenced table, constraint name,
      -- child column(s), escaped parent id column(s), and cascade behaviour.
      AddReference
        EntityNameDB
        ConstraintNameDB
        (NEL.NonEmpty FieldNameDB)
        [Text]
        FieldCascade
    | -- | Drop the named foreign key constraint.
      DropReference ConstraintNameDB
    deriving (Show, Eq)
-- | Represents a change to a Postgres table in a DB statement.
--
-- @since 2.17.1.0
data AlterTable
    = -- | Add a named UNIQUE constraint over the given columns.
      AddUniqueConstraint ConstraintNameDB [FieldNameDB]
    | -- | Drop the named constraint.
      DropConstraint ConstraintNameDB
    deriving (Show, Eq)
-- | Represents a change to a Postgres DB in a statement.
--
-- @since 2.17.1.0
data AlterDB
    = -- | Create a table: name, id definition, and the non-id columns.
      AddTable EntityNameDB EntityIdDef [Column]
    | -- | A column-level change to the named table.
      AlterColumn EntityNameDB AlterColumn
    | -- | A table-level (constraint) change to the named table.
      AlterTable EntityNameDB AlterTable
    deriving (Show, Eq)
-- | Build the 'AddTable' operation for an entity. When the entity has a
-- natural (composite) primary key, every column is kept; otherwise the
-- surrogate id column and any safe-to-remove columns are filtered out of the
-- column list (the id is rendered separately).
--
-- @since 2.17.1.0
addTable :: [Column] -> EntityDef -> AlterDB
addTable cols entity =
    AddTable (getEntityDBName entity) (getEntityId entity) bodyCols
  where
    bodyCols
        | isJust (entityPrimary entity) = cols
        | otherwise = filter keepField cols
    keepField col =
        Just (cName col) /= fmap fieldDB (getEntityIdField entity)
            && not (safeToRemove entity (cName col))
-- | Render a primary-key column type: a 64-bit integer key with no default
-- becomes @SERIAL8@; anything else renders its SQL type directly.
maySerial :: SqlType -> Maybe Text -> Text
maySerial sType mDefault =
    case (sType, mDefault) of
        (SqlInt64, Nothing) -> " SERIAL8 "
        _ -> " " <> showSqlType sType
-- | Render an optional default expression as a @ DEFAULT ...@ SQL fragment,
-- or the empty string when there is none.
mayDefault :: Maybe Text -> Text
mayDefault = maybe T.empty (\expr -> T.pack " DEFAULT " <> expr)
-- | Diff the desired columns and unique constraints (third argument) against
-- the existing ones (fourth argument), producing the column- and table-level
-- operations needed to reconcile them.
getAlters
    :: [EntityDef]
    -> EntityDef
    -> ([Column], [(ConstraintNameDB, [FieldNameDB])])
    -> ([Column], [(ConstraintNameDB, [FieldNameDB])])
    -> ([AlterColumn], [AlterTable])
getAlters defs def (c1, u1) (c2, u2) =
    (getAltersC c1 c2, getAltersU u1 u2)
  where
    -- Existing columns with no remaining desired counterpart are dropped;
    -- otherwise each desired column is diffed (findAlters also removes the
    -- matched column from the remaining "old" list).
    getAltersC [] old =
        map (\x -> Drop x $ SafeToRemove $ safeToRemove def $ cName x) old
    getAltersC (new : news) old =
        let
            (alters, old') = findAlters defs def new old
        in
            alters ++ getAltersC news old'
    getAltersU
        :: [(ConstraintNameDB, [FieldNameDB])]
        -> [(ConstraintNameDB, [FieldNameDB])]
        -> [AlterTable]
    -- Leftover existing constraints are dropped, except manual ones.
    getAltersU [] old =
        map DropConstraint $ filter (not . isManual) $ map fst old
    getAltersU ((name, cols) : news) old =
        case lookup name old of
            Nothing ->
                AddUniqueConstraint name cols : getAltersU news old
            Just ocols ->
                let
                    old' = filter (\(x, _) -> x /= name) old
                in
                    -- Same name, same column set (order-insensitive): nothing
                    -- to do; otherwise recreate the constraint.
                    if sort cols == sort ocols
                        then getAltersU news old'
                        else
                            DropConstraint name
                                : AddUniqueConstraint name cols
                                : getAltersU news old'
    -- Don't drop constraints which were manually added.
    isManual (ConstraintNameDB x) = "__manual_" `T.isPrefixOf` x
-- | Postgres' default maximum identifier length in bytes
-- (You can re-compile Postgres with a new limit, but I'm assuming that virtually no one does this).
-- See https://www.postgresql.org/docs/11/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
maximumIdentifierLength :: Int
maximumIdentifierLength = 63
-- | Compare two SQL types by their rendered names, case-insensitively and
-- with Postgres aliases normalised, so that e.g. @int8@ and @bigint@ compare
-- equal and don't trigger a needless @ALTER TYPE@.
sqlTypeEq :: SqlType -> SqlType -> Bool
sqlTypeEq x y = canonical x == canonical y
  where
    canonical = normalize . T.toCaseFold . showSqlType
    -- Non exhaustive helper to map postgres aliases to the same name. Based on
    -- https://www.postgresql.org/docs/9.5/datatype.html.
    normalize "int8" = "bigint"
    normalize "serial8" = "bigserial"
    normalize v = v
-- Decide whether an existing foreign key matches the desired one closely
-- enough that no ALTER is needed. This is near-equality, except that an
-- unspecified cascade action and an explicit RESTRICT are treated as the
-- same thing.
equivalentRef :: Maybe ColumnReference -> Maybe ColumnReference -> Bool
equivalentRef mref1 mref2 =
    case (mref1, mref2) of
        (Nothing, Nothing) -> True
        (Just cr1, Just cr2) ->
            crTableName cr1 == crTableName cr2
                && crConstraintName cr1 == crConstraintName cr2
                && cascadesAgree fcOnUpdate cr1 cr2
                && cascadesAgree fcOnDelete cr1 cr2
        _ -> False
  where
    cascadesAgree sel cr1 cr2 =
        eqCascade (sel (crFieldCascade cr1)) (sel (crFieldCascade cr2))
    eqCascade :: Maybe CascadeAction -> Maybe CascadeAction -> Bool
    eqCascade mc1 mc2 =
        case (mc1, mc2) of
            (Nothing, Nothing) -> True
            (Nothing, Just Restrict) -> True
            (Just Restrict, Nothing) -> True
            (Just cs1, Just cs2) -> cs1 == cs2
            _ -> False
-- | Generate the default foreign key constraint name for a given source table and
-- source column name. Note that this function should generally not be used
-- except as an argument to postgresMkColumns, because if you use it in other contexts,
-- you're likely to miss nonstandard constraint names declared in the persistent
-- models files via `constraint=`
refName :: EntityNameDB -> FieldNameDB -> ConstraintNameDB
refName (EntityNameDB table) (FieldNameDB column) =
    ConstraintNameDB $
        T.concat [T.take tableLen table, "_", T.take columnLen column, "_fkey"]
  where
    -- Fixed characters around the two names: the joining "_" plus "_fkey".
    overhead = T.length "_" + T.length "_fkey"
    (tableLen, columnLen) = shorten (T.length table, T.length column)
    -- Postgres silently truncates identifiers longer than
    -- 'maximumIdentifierLength' to truncatedTable + "_" + truncatedColumn +
    -- "_fkey". If we sent the full-length name, Persistent would later see a
    -- constraint whose name differs from the one it expected and suggest a
    -- spurious migration — so we reproduce Postgres' truncation here, up
    -- front. This mirrors makeObjectName:
    -- https://github.com/postgres/postgres/blob/5406513e997f5ee9de79d4076ae91c04af0c52f6/src/backend/commands/indexcmds.c#L2074-L2080
    -- (shaving one character at a time from whichever name is longer).
    shorten :: (Int, Int) -> (Int, Int)
    shorten (x, y)
        | x + y + overhead <= maximumIdentifierLength = (x, y)
        | x > y = shorten (x - 1, y)
        | otherwise = shorten (x, y - 1)
-- | Build the columns, unique defs, and foreign defs for an entity using the
-- Postgres-specific FK naming scheme ('refName').
postgresMkColumns
    :: [EntityDef] -> EntityDef -> ([Column], [UniqueDef], [ForeignDef])
postgresMkColumns allDefs t =
    mkColumns allDefs t overrides
  where
    overrides = setBackendSpecificForeignKeyName refName emptyBackendSpecificOverrides
-- | True when the named database column carries the @SafeToRemove@ attribute
-- on the corresponding field of the entity (the id field, if any, is
-- considered too).
safeToRemove :: EntityDef -> FieldNameDB -> Bool
safeToRemove def (FieldNameDB colName) =
    or
        [ FieldAttrSafeToRemove `elem` fieldAttrs field
        | field <- allEntityFields
        , fieldDB field == FieldNameDB colName
        ]
  where
    allEntityFields =
        getEntityFieldsDatabase def <> case getEntityId def of
            EntityIdField fdef ->
                [fdef]
            _ ->
                []
-- | Project a 'UniqueDef' to its DB constraint name and DB column names.
udToPair :: UniqueDef -> (ConstraintNameDB, [FieldNameDB])
udToPair ud = (uniqueDBName ud, fmap snd (NEL.toList (uniqueFields ud)))
-- | Build the 'AddReference' operation for one column's foreign key, or
-- 'Nothing' when the column is the entity's own id column (the FK there is
-- handled elsewhere).
getAddReference
    :: [EntityDef]
    -> EntityDef
    -> FieldNameDB
    -> ColumnReference
    -> Maybe AlterDB
getAddReference allDefs entity cname cr@ColumnReference{crTableName = refTable, crConstraintName = constraintName}
    | Just cname == fmap fieldDB (getEntityIdField entity) = Nothing
    | otherwise =
        Just $
            AlterColumn
                (getEntityDBName entity)
                (AddReference refTable constraintName (cname NEL.:| []) parentIdCols (crFieldCascade cr))
  where
    -- Escaped id column(s) of the referenced entity; forcing this for an
    -- unknown table is a programmer error, hence 'error'.
    parentIdCols =
        case find ((== refTable) . getEntityDBName) allDefs of
            Just entDef -> NEL.toList $ Util.dbIdColumnsEsc escapeF entDef
            Nothing -> error $ "Could not find ID of entity " ++ show refTable
-- | Build the 'AddReference' operation for a standalone @Foreign@
-- declaration, or 'Nothing' when it names no child fields.
mkForeignAlt
    :: EntityDef
    -> ForeignDef
    -> Maybe AlterDB
mkForeignAlt entity fdef =
    fmap toAlter (NEL.nonEmpty childFields)
  where
    toAlter children =
        AlterColumn (getEntityDBName entity) $
            AddReference
                (foreignRefTableDBName fdef)
                (foreignConstraintNameDBName fdef)
                children
                (map escapeF parentFields)
                (foreignFieldCascade fdef)
    -- foreignFields pairs (haskell, db) names on both sides; we only need
    -- the DB names.
    (childFields, parentFields) =
        unzip [(childDb, parentDb) | ((_, childDb), (_, parentDb)) <- foreignFields fdef]
-- | Escape a constraint name for inclusion in generated SQL.
escapeC :: ConstraintNameDB -> Text
escapeC = escapeWith escape
-- | Escape a table (entity) name for inclusion in generated SQL.
escapeE :: EntityNameDB -> Text
escapeE = escapeWith escape
-- | Escape a column (field) name for inclusion in generated SQL.
escapeF :: FieldNameDB -> Text
escapeF = escapeWith escape
-- | Quote an identifier for Postgres: wrap it in double quotes and double
-- any embedded double-quote characters.
escape :: Text -> Text
escape s =
    T.pack ('"' : concatMap doubled (T.unpack s) ++ "\"")
  where
    doubled '"' = "\"\""
    doubled c = [c]
-- | Render an 'AlterDB' operation as SQL, paired with a flag that is 'True'
-- when the statement is unsafe (i.e. it drops a column not marked safe to
-- remove).
showAlterDb :: AlterDB -> (Bool, Text)
showAlterDb (AddTable name entityId nonIdCols) = (False, rawText)
  where
    -- The primary key fragment: either a composite PRIMARY KEY clause, or a
    -- single id column (possibly SERIAL8, see 'maySerial') with its default.
    idtxt =
        case entityId of
            EntityIdNaturalKey pdef ->
                T.concat
                    [ " PRIMARY KEY ("
                    , T.intercalate "," $ map (escapeF . fieldDB) $ NEL.toList $ compositeFields pdef
                    , ")"
                    ]
            EntityIdField field ->
                let
                    defText = defaultAttribute $ fieldAttrs field
                    sType = fieldSqlType field
                in
                    T.concat
                        [ escapeF $ fieldDB field
                        , maySerial sType defText
                        , " PRIMARY KEY UNIQUE"
                        , mayDefault defText
                        ]
    rawText =
        T.concat
            -- Lower case e: see Database.Persist.Sql.Migration
            [ "CREATe TABLE " -- DO NOT FIX THE CAPITALIZATION!
            , escapeE name
            , "("
            , idtxt
            , if null nonIdCols then "" else ","
            , T.intercalate "," $ map showColumn nonIdCols
            , ")"
            ]
showAlterDb (AlterColumn t ac) =
    (isUnsafe ac, showAlter t ac)
  where
    isUnsafe (Drop _ (SafeToRemove safeRemove)) = not safeRemove
    isUnsafe _ = False
showAlterDb (AlterTable t at) = (False, showAlterTable t at)
-- | Render an 'AlterTable' operation as an @ALTER TABLE@ statement.
showAlterTable :: EntityNameDB -> AlterTable -> Text
showAlterTable table alteration =
    case alteration of
        AddUniqueConstraint cname cols ->
            T.concat
                [ "ALTER TABLE "
                , escapeE table
                , " ADD CONSTRAINT "
                , escapeC cname
                , " UNIQUE("
                , T.intercalate "," $ map escapeF cols
                , ")"
                ]
        DropConstraint cname ->
            T.concat
                [ "ALTER TABLE "
                , escapeE table
                , " DROP CONSTRAINT "
                , escapeC cname
                ]
showAlter :: EntityNameDB -> AlterColumn -> Text
showAlter table (ChangeType c t extra) =
T.concat
[ "ALTER TABLE "
, escapeE table
, " ALTER COLUMN "
, escapeF (cName c)
, " TYPE "
, showSqlType t
, extra
]
showAlter table (IsNull c) =
T.concat
[ "ALTER TABLE "
, escapeE table
, " ALTER COLUMN "
, escapeF (cName c)
, " DROP NOT NULL"
]
showAlter table (NotNull c) =
T.concat
[ "ALTER TABLE "
, escapeE table
, " ALTER COLUMN "
, escapeF (cName c)
, " SET NOT NULL"
]
showAlter table (AddColumn col) =
T.concat
[ "ALTER TABLE "
, escapeE table