implement a basic brush store

* each stroke stores the brushes it uses by reference,
    so that one can interactively edit brushes
    and see the strokes update in real time
  * when writing to a file, we retrieve the brush code
    from the reference, and include that information statically
sheaf 2020-11-14 23:32:23 +01:00
parent 393ef6f06e
commit 1e4bb4bddc
13 changed files with 758 additions and 504 deletions
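For orientation, here is a minimal, self-contained sketch of the design the commit message describes: strokes hold a brush reference, brushes live in a shared store so an edit propagates to every stroke that uses the brush, and saving resolves the reference back to the brush code so the written file is self-contained. The names below (BrushId, BrushStore, Stroke, editBrush, saveStroke) are hypothetical simplifications for illustration only, not the actual MetaBrush types appearing in the diff.

-- Hypothetical, simplified sketch of "brushes by reference" (not the real MetaBrush types).
{-# LANGUAGE NamedFieldPuns    #-}
{-# LANGUAGE OverloadedStrings #-}
module BrushStoreSketch where

import Data.Map.Strict ( Map )
import qualified Data.Map.Strict as Map
import Data.Text ( Text )

-- Brushes are identified by a key; strokes only hold the key.
newtype BrushId = BrushId Int
  deriving ( Eq, Ord, Show )

data Brush = Brush
  { brushName :: Text
  , brushCode :: Text   -- brush source code, interpreted at runtime
  } deriving Show

type BrushStore = Map BrushId Brush

data Stroke = Stroke
  { strokeName  :: Text
  , strokeBrush :: Maybe BrushId  -- a reference, not a copy of the brush
  } deriving Show

-- Editing a brush only touches the store; every stroke referencing it
-- picks up the new definition on the next redraw.
editBrush :: BrushId -> ( Brush -> Brush ) -> BrushStore -> BrushStore
editBrush = flip Map.adjust

-- On save, resolve the reference and embed the brush code statically,
-- so the written file stands on its own.
saveStroke :: BrushStore -> Stroke -> Either String Text
saveStroke store ( Stroke { strokeName, strokeBrush } ) =
  case strokeBrush of
    Nothing  -> Right ( strokeName <> ": <no brush>" )
    Just bId ->
      case Map.lookup bId store of
        Nothing -> Left ( "unknown brush reference: " <> show bId )
        Just ( Brush { brushName, brushCode } ) ->
          Right ( strokeName <> ": " <> brushName <> "\n" <> brushCode )

In the actual diff below, the role of BrushId is played by Unique: the application keeps a global HashMap Brush Unique in brushesTVar, each document keeps a Map Unique Brush in documentBrushes, and strokes carry a BrushReference that additionally records the brush's type-level record fields.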

View file

@ -122,6 +122,7 @@ executable MetaBrush
, MetaBrush.Asset.TickBox
, MetaBrush.Asset.Tools
, MetaBrush.Asset.WindowIcons
, MetaBrush.Brush
, MetaBrush.Context
, MetaBrush.Document
, MetaBrush.Document.Draw
@ -175,6 +176,10 @@ executable MetaBrush
^>= 1.4.2.1
, ghc-typelits-knownnat
^>= 0.7.3
, gi-cairo-render
^>= 0.0.1
, gi-cairo-connector
^>= 0.0.1
, gi-gdk
>= 3.0.22 && < 3.1
, gi-gio
@ -185,10 +190,10 @@ executable MetaBrush
^>= 2.0.24
, gi-gtk
>= 3.0.35 && < 3.1
, gi-cairo-render
^>= 0.0.1
, gi-cairo-connector
^>= 0.0.1
, gi-gtksource
>= 3.0.23 && < 3.1
, hashable
^>= 1.3.0.0
, haskell-gi-base
^>= 0.24.3
, lens
@ -207,5 +212,7 @@ executable MetaBrush
>= 1.2.3.1 && < 1.2.5
, tree-view
^>= 0.5
, unordered-containers
>= 0.2.11 && < 0.2.14
, waargonaut
^>= 0.8.0.1

View file

@ -65,6 +65,8 @@ import qualified GI.Gtk as GTK
-- lens
import Control.Lens
( (.~), set )
import Control.Lens.At
( at )
-- stm
import qualified Control.Concurrent.STM as STM
@ -82,6 +84,16 @@ import qualified SuperRecord
import qualified Data.Text as Text
( pack )
-- transformers
import Control.Monad.Trans.Reader
( runReaderT )
-- unordered-containers
import Data.HashMap.Strict
( HashMap )
import qualified Data.HashMap.Strict as HashMap
( fromList )
-- MetaBrush
import Math.Bezier.Cubic.Fit
( FitParameters(..) )
@ -99,6 +111,8 @@ import MetaBrush.Asset.Colours
( getColours )
import MetaBrush.Asset.Logo
( drawLogo )
import MetaBrush.Brush
( Brush, newBrushReference )
import MetaBrush.Context
( UIElements(..), Variables(..)
, Modifier(..)
@ -133,7 +147,7 @@ import MetaBrush.UI.Viewport
( Viewport(..), Ruler(..), createViewport )
import MetaBrush.Unique
( newUniqueSupply
, Unique, unsafeUnique
, Unique, freshUnique
, uniqueMapFromList
)
import MetaBrush.Util
@ -164,18 +178,26 @@ main = do
uniqueSupply <- newUniqueSupply
circleBrush <- Asset.Brushes.circle uniqueSupply
circleBrushUnique <- runReaderT freshUnique uniqueSupply
docUnique <- runReaderT freshUnique uniqueSupply
strokeUnique <- runReaderT freshUnique uniqueSupply
let
testBrushes :: HashMap Brush Unique
testBrushes = HashMap.fromList
[ ( circleBrush, circleBrushUnique ) ]
testDocuments :: Map Unique DocumentHistory
testDocuments = fmap newHistory $ uniqueMapFromList
[ emptyDocument "Test" ( unsafeUnique 0 )
[ emptyDocument "Test" docUnique
& ( field' @"documentContent" . field' @"strokes" ) .~
[ Stroke
{ strokeName = "Stroke 1"
, strokeVisible = True
, strokeUnique = unsafeUnique 10
, strokeBrush = circleBrush
, strokeSpline =
{ strokeName = "Stroke 1"
, strokeVisible = True
, strokeUnique = strokeUnique
, strokeBrushRef = newBrushReference @'[ "r" SuperRecord.:= Double ] circleBrushUnique
, strokeSpline =
Spline
{ splineStart = mkPoint ( Point2D 10 -20 ) 2
, splineCurves = OpenCurves $ Seq.fromList
@ -186,6 +208,7 @@ main = do
}
}
]
& ( field' @"documentBrushes" . at circleBrushUnique ) .~ ( Just circleBrush )
]
where
mkPoint :: Point2D Double -> Double -> PointData ( Super.Rec '[ "r" SuperRecord.:= Double ] )
@ -193,6 +216,7 @@ main = do
activeDocumentTVar <- STM.newTVarIO @( Maybe Unique ) Nothing
openDocumentsTVar <- STM.newTVarIO @( Map Unique DocumentHistory ) testDocuments
brushesTVar <- STM.newTVarIO @( HashMap Brush Unique ) testBrushes
mousePosTVar <- STM.newTVarIO @( Maybe ( Point2D Double ) ) Nothing
mouseHoldTVar <- STM.newTVarIO @( Maybe HoldAction ) Nothing
modifiersTVar <- STM.newTVarIO @( Set Modifier ) Set.empty
@ -204,14 +228,14 @@ main = do
showGuidesTVar <- STM.newTVarIO @Bool True
maxHistorySizeTVar <- STM.newTVarIO @Int 1000
fitParametersTVar <- STM.newTVarIO @FitParameters
( FitParameters
{ maxSubdiv = 6
, nbSegments = 12
, dist_tol = 5e-3
, t_tol = 1e-4
, maxIters = 100
}
)
( FitParameters
{ maxSubdiv = 6
, nbSegments = 12
, dist_tol = 5e-3
, t_tol = 1e-4
, maxIters = 100
}
)
-- Put all these stateful variables in a record for conciseness.
let

View file

@ -184,7 +184,9 @@ instance HandleAction OpenFile where
void $ GTK.nativeDialogRun fileChooser
filePaths <- GTK.fileChooserGetFilenames fileChooser
for_ filePaths \ filePath -> do
mbDoc <- loadDocument uniqueSupply filePath
knownBrushes <- STM.atomically $ STM.readTVar brushesTVar
( mbDoc, knownBrushes' ) <- loadDocument uniqueSupply knownBrushes filePath
STM.atomically ( STM.writeTVar brushesTVar knownBrushes' )
case mbDoc of
Left errMessage -> warningDialog window filePath errMessage
Right doc -> do
@ -233,7 +235,9 @@ instance HandleAction OpenFolder where
when exists do
filePaths <- listDirectory folderPath
for_ filePaths \ filePath -> when ( takeExtension filePath == ".mb" ) do
mbDoc <- loadDocument uniqueSupply ( folderPath </> filePath )
knownBrushes <- STM.atomically $ STM.readTVar brushesTVar
( mbDoc, knownBrushes' ) <- loadDocument uniqueSupply knownBrushes ( folderPath </> filePath )
STM.atomically ( STM.writeTVar brushesTVar knownBrushes' )
case mbDoc of
Left errMessage -> warningDialog window filePath errMessage
Right doc -> do

View file

@ -9,16 +9,6 @@
module MetaBrush.Asset.Brushes where
-- base
import Data.Kind
( Type )
import Data.Type.Equality
( (:~:)(Refl) )
-- superrecord
import qualified SuperRecord
( (:=) )
-- text
import Data.Text
( Text )
@ -26,10 +16,8 @@ import qualified Data.Text as Text
( unpack )
-- MetaBrush
import MetaBrush.Document
import MetaBrush.Brush
( Brush(..) )
import MetaBrush.MetaParameter.AST
( BrushFunction, STypesI(sTypesI), eqTys )
import MetaBrush.MetaParameter.Driver
( SomeBrushFunction(..), interpretBrush )
import MetaBrush.Unique
@ -37,11 +25,8 @@ import MetaBrush.Unique
--------------------------------------------------------------------------------
circle
:: forall circleBrushFields
. ( circleBrushFields ~ '[ "r" SuperRecord.:= Double ] )
=> UniqueSupply -> IO ( Brush circleBrushFields )
circle uniqueSupply = mkBrush @circleBrushFields uniqueSupply name code
circle :: UniqueSupply -> IO Brush
circle uniqueSupply = mkBrush uniqueSupply name code
where
name, code :: Text
name = "Circle"
@ -57,11 +42,8 @@ circle uniqueSupply = mkBrush @circleBrushFields uniqueSupply name code
\ -- (-r ,-r*c) -- (-r*c,-r ) -> ( 0,-r)\n\
\ -- ( r*c,-r ) -- ( r ,-r*c) -> . ]"
rounded
:: forall roundedBrushFields
. ( roundedBrushFields ~ '[ ] ) -- TODO
=> UniqueSupply -> IO ( Brush roundedBrushFields )
rounded uniqueSupply = mkBrush @roundedBrushFields uniqueSupply name code
rounded :: UniqueSupply -> IO Brush
rounded uniqueSupply = mkBrush uniqueSupply name code
where
name, code :: Text
name = "Rounded quadrilateral"
@ -88,21 +70,11 @@ rounded uniqueSupply = mkBrush @roundedBrushFields uniqueSupply name code
--------------------------------------------------------------------------------
mkBrush
:: forall ( givenBrushFields :: [ Type ] )
. STypesI givenBrushFields
=> UniqueSupply -> Text -> Text
-> IO ( Brush givenBrushFields )
mkBrush :: UniqueSupply -> Text -> Text -> IO Brush
mkBrush uniqSupply brushName brushCode = do
( mbBrush, _ ) <- interpretBrush uniqSupply brushCode
case mbBrush of
Left err -> error ( "Could not interpret '" <> Text.unpack brushName <> "' brush:\n" <> show err )
Right ( SomeBrushFunction ( brushFunction :: BrushFunction inferredBrushFields ) ) ->
case eqTys @givenBrushFields @inferredBrushFields of
Just Refl -> pure ( BrushData { brushName, brushCode, brushFunction } )
Nothing ->
error
( "Incorrect record type for '" <> Text.unpack brushName <> "' brush:\n\
\Expected: " <> show ( sTypesI @givenBrushFields ) <> "\n\
\ Actual: " <> show ( sTypesI @inferredBrushFields )
)
Left err ->
error ( "Could not interpret '" <> Text.unpack brushName <> "' brush:\n" <> show err )
Right ( SomeBrushFunction brushFunction ) ->
pure ( BrushData { brushName, brushCode, brushFunction } )

src/app/MetaBrush/Brush.hs (new file, 370 lines)
View file

@ -0,0 +1,370 @@
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fplugin GHC.TypeLits.KnownNat.Solver #-}
module MetaBrush.Brush
( Brush(..), BrushReference(..), newBrushReference
, SomeBrushFields(..), SomeFieldSType(..), reflectBrushFieldsNoDups
)
where
-- base
import Data.Kind
( Type )
import Data.List
( intersect )
import Data.Proxy
( Proxy )
import Data.Type.Equality
( (:~:)(Refl) )
import GHC.Exts
( Proxy#, proxy# )
import GHC.TypeLits
( KnownSymbol, SomeSymbol(..)
, someSymbolVal, symbolVal'
)
import GHC.TypeNats
( KnownNat, SomeNat(..), someNatVal, type (-) )
import Unsafe.Coerce
( unsafeCoerce )
-- deepseq
import Control.DeepSeq
( NFData(..), deepseq )
-- groups
import Data.Group
( Group )
-- hashable
import Data.Hashable
( Hashable(..) )
-- superrecord
import qualified SuperRecord as Super
( Rec )
import qualified SuperRecord
( Has, RecTy, (:=)
, RecSize, RecApply(..), RecVecIdxPos, UnsafeRecBuild(..)
, TraversalCHelper, RemoveAccessTo, Intersect
)
import SuperRecord
( ConstC, Tuple22C )
-- text
import Data.Text
( Text )
import qualified Data.Text as Text
( pack, unpack )
-- unordered-containers
import Data.HashMap.Strict
( HashMap )
import qualified Data.HashMap.Strict as HashMap
( fromList, lookup )
-- MetaBrush
import Math.Module
( Module )
import Math.Vector2D
( Point2D )
import {-# SOURCE #-} MetaBrush.Document.Serialise
( Serialisable )
import MetaBrush.MetaParameter.AST
( SType(..), STypeI(..), SomeSType(..), STypes(..), STypesI(..), someSTypes
, Adapted, BrushFunction
, MapFields, UniqueField, UseFieldsInBrush
)
import MetaBrush.MetaParameter.Interpolation
( Interpolatable(..), MapDiff, HasDiff', HasTorsor )
import MetaBrush.Unique
( Unique )
--------------------------------------------------------------------------------
data Brush where
BrushData
:: forall brushFields
. ( STypesI brushFields )
=>
{ brushName :: !Text
, brushCode :: !Text
, brushFunction :: !( BrushFunction brushFields )
}
-> Brush
instance Show Brush where
show ( BrushData { brushName } ) = Text.unpack brushName
instance NFData Brush where
rnf ( BrushData { brushName, brushCode } )
= deepseq brushCode
$ rnf brushName
instance Eq Brush where
BrushData name1 code1 _ == BrushData name2 code2 _ = name1 == name2 && code1 == code2
instance Ord Brush where
compare ( BrushData name1 code1 _ ) ( BrushData name2 code2 _ ) = compare ( name1, code1 ) ( name2, code2 )
instance Hashable Brush where
hashWithSalt salt ( BrushData { brushName, brushCode } ) =
hashWithSalt ( hashWithSalt salt brushName ) brushCode
data BrushReference pointFields where
NoBrush :: BrushReference pointFields
BrushReference
:: forall brushFields pointFields usedFields brushParams usedParams
. ( brushParams ~ Super.Rec brushFields, STypesI brushFields
, usedParams ~ Super.Rec usedFields
, Interpolatable usedParams
, Adapted brushFields pointFields usedFields
)
=> Proxy# brushFields
-> Unique
-> BrushReference pointFields
instance Show ( BrushReference pointFields ) where
show NoBrush = "NoBrush"
show ( BrushReference ( _ :: Proxy# brushFields ) uniq ) =
"BrushReference @(" <> show ( sTypesI @brushFields ) <> ") " <> show uniq
instance NFData ( BrushReference pointFields ) where
rnf NoBrush = ()
rnf ( BrushReference _ unique ) = rnf unique
newBrushReference
:: forall brushFields pointFields
. ( STypesI brushFields, STypesI pointFields )
=> Unique
-> BrushReference pointFields
newBrushReference uniq = case proveAdapted @brushFields @pointFields of
Dict -> BrushReference ( proxy# :: Proxy# brushFields ) uniq
--------------------------------------------------------------------------------
-- Instance dictionary passing machinery.
data Dict c where
Dict :: c => Dict c
proveAdapted
:: forall brushFields givenFields usedFields drts_used
. ( STypesI brushFields, STypesI givenFields
, usedFields ~ ( brushFields `SuperRecord.Intersect` givenFields )
, drts_used ~ MapDiff usedFields
)
=> Dict ( Adapted brushFields givenFields usedFields
, Interpolatable ( Super.Rec usedFields )
)
proveAdapted = case go ( sTypesI @brushFields ) of { Dict -> Dict }
where
brushFields, givenFields, usedFields :: [ ( Text, SomeSType ) ]
brushFields = someSTypes @brushFields
givenFields = someSTypes @givenFields
usedFields = intersect brushFields givenFields
nbUsedFields :: Int
nbUsedFields = length usedFields
givenIxFieldsMap, usedIxFieldsMap :: HashMap Text Int
givenIxFieldsMap = listToEndIndexMap givenFields
usedIxFieldsMap = listToEndIndexMap usedFields
go :: forall lts_brush lts_used dlts_used
. ( lts_used ~ ( lts_brush `SuperRecord.Intersect` givenFields )
, dlts_used ~ MapDiff lts_used
)
=> STypes lts_brush
-> Dict
( SuperRecord.UnsafeRecBuild usedFields lts_used ( SuperRecord.Has givenFields )
, SuperRecord.RecApply ( MapFields UniqueField brushFields ) ( MapFields UniqueField lts_brush ) ( UseFieldsInBrush usedFields )
, SuperRecord.UnsafeRecBuild drts_used dlts_used ( ConstC Monoid )
, SuperRecord.UnsafeRecBuild drts_used dlts_used ( ConstC ( Module Double ) )
, SuperRecord.RecApply drts_used dlts_used ( Tuple22C ( ConstC ( Module Double ) ) ( SuperRecord.Has drts_used ) )
, SuperRecord.RecApply drts_used dlts_used ( Tuple22C ( ConstC Semigroup ) ( SuperRecord.Has drts_used ) )
, SuperRecord.RecApply drts_used dlts_used ( Tuple22C ( ConstC Group ) ( SuperRecord.Has drts_used ) )
, SuperRecord.RecApply drts_used dlts_used ( HasDiff' usedFields )
, SuperRecord.TraversalCHelper dlts_used usedFields drts_used ( HasTorsor usedFields )
)
go STyNil
| SomeNat ( _ :: Proxy nbUsedFields ) <- someNatVal ( fromIntegral nbUsedFields )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize usedFields :~: nbUsedFields )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize drts_used :~: nbUsedFields )
= Dict
go sTyCons@STyCons
| SomeNat ( _ :: Proxy nbUsedFields ) <- someNatVal ( fromIntegral nbUsedFields )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize usedFields :~: nbUsedFields )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize drts_used :~: nbUsedFields )
, ( _ :: STypes ( ( k SuperRecord.:= v ) ': tail_lts_brush ) ) <- sTyCons
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize ( MapFields UniqueField brushFields ) :~: SuperRecord.RecSize brushFields )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k ( MapFields UniqueField brushFields ) :~: Just ( UniqueField v ) )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k ( MapFields UniqueField tail_lts_brush ) :~: MapFields UniqueField tail_lts_brush )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k ( MapFields UniqueField brushFields ) :~: SuperRecord.RecSize tail_lts_brush )
, let
k :: Text
k = Text.pack ( symbolVal' ( proxy# :: Proxy# k ) )
= case HashMap.lookup k usedIxFieldsMap of
Just k_used_indexFromEnd
| SomeNat ( _ :: Proxy k_used_indexFromEnd ) <- someNatVal ( fromIntegral k_used_indexFromEnd )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k lts_used :~: k_used_indexFromEnd )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k usedFields :~: k_used_indexFromEnd )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k drts_used :~: k_used_indexFromEnd )
, Just k_given_indexFromEnd <- HashMap.lookup k givenIxFieldsMap
, SomeNat ( _ :: Proxy k_given_indexFromEnd ) <- someNatVal ( fromIntegral k_given_indexFromEnd )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k givenFields :~: k_given_indexFromEnd )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k lts_used :~: Just v )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k usedFields :~: Just v )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k givenFields :~: Just v )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k drts_used :~: Just ( Diff v ) )
, ( _ :: Proxy# tail_lts_used ) <- ( proxy# :: Proxy# ( tail_lts_brush `SuperRecord.Intersect` givenFields ) )
, ( _ :: Proxy# tail_dlts_used ) <- ( proxy# :: Proxy# ( MapDiff tail_lts_used ) )
, Refl <- ( unsafeCoerce Refl :: lts_used :~: ( ( k SuperRecord.:= v ) ': tail_lts_used ) )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize tail_lts_used :~: ( SuperRecord.RecSize lts_used - 1 ) )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize tail_dlts_used :~: SuperRecord.RecSize tail_lts_used )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k tail_dlts_used :~: tail_dlts_used )
, Just Dict <- interpolatableDict @v
-> case go ( sTypesI @tail_lts_brush ) of { Dict -> Dict }
_
| Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k lts_used :~: Nothing )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k usedFields :~: Nothing )
, Refl <- ( unsafeCoerce Refl :: lts_used :~: ( tail_lts_brush `SuperRecord.Intersect` givenFields ) )
-> case go ( sTypesI @tail_lts_brush ) of { Dict -> Dict }
interpolatableDict :: forall t. STypeI t => Maybe ( Dict ( Interpolatable t ) )
interpolatableDict =
case sTypeI @t of
STyDouble -> Just Dict
sTyPoint@STyPoint
| ( _ :: SType ( Point2D c ) ) <- sTyPoint
, STyDouble <- sTypeI @c
-> Just Dict
_ -> Nothing
listToEndIndexMap :: ( Eq k, Hashable k ) => [ ( k, v ) ] -> HashMap k Int
listToEndIndexMap kvs =
HashMap.fromList
$ zipWith ( \ ( fieldName, _ ) index -> ( fieldName, lg - index - 1 ) )
kvs
[ 0 .. ]
where
lg :: Int
lg = length kvs
-- | Reflects a list of brush fields to the type level.
--
-- Assumes the input list has no duplicate field names,
-- but they don't have to be sorted.
reflectBrushFieldsNoDups :: [ ( Text, SomeFieldSType ) ] -> SomeBrushFields
reflectBrushFieldsNoDups = fromSomeBrushFieldsList . mkBrushFieldsList
where
mkBrushFieldsList :: [ ( Text, SomeFieldSType ) ] -> SomeBrushFieldsList
mkBrushFieldsList [] = SomeBrushFieldsList NilFields
mkBrushFieldsList ( ( k, SomeFieldSType ( _ :: Proxy# v ) ) : kvs )
| SomeBrushFieldsList ( kvs_list :: BrushFieldsList kvs ) <- mkBrushFieldsList kvs
, SomeSymbol ( _ :: Proxy k ) <- someSymbolVal ( Text.unpack k )
-- deduce RecSize ( MapDiff kvs ) ~ RecSize kvs
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize ( MapDiff kvs ) :~: SuperRecord.RecSize kvs )
-- compute indexing into record list (with SuperRecord, the index is the number of fields remaining)
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k ( ( k SuperRecord.:= v ) : kvs ) :~: SuperRecord.RecSize kvs )
= SomeBrushFieldsList ( ConsFields ( proxy# :: Proxy# k ) ( proxy# :: Proxy# v ) kvs_list )
fromSomeBrushFieldsList :: SomeBrushFieldsList -> SomeBrushFields
fromSomeBrushFieldsList ( SomeBrushFieldsList ( kvs :: BrushFieldsList kvs ) ) = case go @kvs kvs of
SomeClassyBrushFieldsList ( _ :: Proxy# kvs ) ( _ :: Proxy# kvs ) ->
SomeBrushFields ( proxy# :: Proxy# kvs )
where
go :: forall ( rts :: [ Type ] ) ( lts :: [ Type ] )
. ( STypesI rts, KnownNat ( SuperRecord.RecSize rts ), KnownNat ( SuperRecord.RecSize ( MapDiff rts ) ) )
=> BrushFieldsList lts -> SomeClassyBrushFieldsList rts lts
go NilFields =
SomeClassyBrushFieldsList ( proxy# :: Proxy# rts ) ( proxy# :: Proxy# '[] )
go ( ConsFields ( _ :: Proxy# k ) ( _ :: Proxy# a ) kvs' )
| ( SomeClassyBrushFieldsList _ ( _ :: Proxy# lts' ) ) <- go @rts kvs'
-- Assert some facts that result from the field names being distinct:
-- - current field name does not re-occur later on
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k lts' :~: lts' )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k ( MapDiff lts' ) :~: MapDiff lts' )
-- - looking up the type associated with the current field name returns the current type
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k rts :~: Just a )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k ( MapDiff rts ) :~: Just ( Diff a ) )
-- - MapDiff preserves length
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize ( MapDiff lts' ) :~: SuperRecord.RecSize lts' )
-- - compute the index (which is the number of fields remaining, i.e. the indexing starts counting from 0 from the right)
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k rts :~: SuperRecord.RecSize lts' )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k ( MapDiff rts ) :~: SuperRecord.RecSize lts' )
= SomeClassyBrushFieldsList ( proxy# :: Proxy# rts ) ( proxy# :: Proxy# ( ( k SuperRecord.:= a ) ': lts' ) )
-- | Existential type over an allowed record field type used in brushes, such as Double and Point2D Double.
data SomeFieldSType where
SomeFieldSType
:: ( STypeI a, Show a, NFData a, Serialisable a, Interpolatable a )
=> Proxy# a -> SomeFieldSType
-- | Existential type for allowed fields of a brush record.
data SomeBrushFields where
SomeBrushFields
:: forall kvs rec
. ( STypesI kvs
, rec ~ Super.Rec kvs
, Show rec, NFData rec
, Interpolatable rec
, Serialisable rec
)
=> Proxy# kvs -> SomeBrushFields
instance Show SomeBrushFields where
show ( SomeBrushFields ( _ :: Proxy# kvs ) ) = show ( sTypesI @kvs )
-- | Auxiliary datatype used to create a proof that record fields have the required instances.
data BrushFieldsList kvs where
NilFields :: BrushFieldsList '[]
ConsFields
::
( KnownSymbol k
, Show a, NFData a, Serialisable a
, Interpolatable a
, STypesI kvs
, KnownNat ( SuperRecord.RecSize kvs )
, SuperRecord.Has ( k SuperRecord.:= a ': kvs ) k a
)
=> Proxy# k -> Proxy# a -> BrushFieldsList kvs -> BrushFieldsList ( k SuperRecord.:= a ': kvs )
-- | Existential type used in the process of proving that record fields have the required instances.
data SomeBrushFieldsList where
SomeBrushFieldsList
:: ( STypesI kvs
, KnownNat ( SuperRecord.RecSize kvs )
, KnownNat ( SuperRecord.RecSize ( MapDiff kvs ) )
)
=> BrushFieldsList kvs -> SomeBrushFieldsList
-- | Type used to backtrack instance resolution in the SuperRecord library,
-- to witness the required typeclass instances by induction on the record fields.
data SomeClassyBrushFieldsList rts lts where
SomeClassyBrushFieldsList
:: forall rts lts drts dlts
. ( drts ~ MapDiff rts
, dlts ~ MapDiff lts
, KnownNat ( SuperRecord.RecSize rts )
, KnownNat ( SuperRecord.RecSize drts )
, SuperRecord.UnsafeRecBuild rts lts ( ConstC Serialisable )
, SuperRecord.UnsafeRecBuild drts dlts ( ConstC ( Module Double ) )
, SuperRecord.UnsafeRecBuild drts dlts ( ConstC Monoid )
, SuperRecord.RecApply rts lts ( ConstC Show )
, SuperRecord.RecApply rts lts ( ConstC NFData )
, SuperRecord.RecApply rts lts ( ConstC Serialisable )
, SuperRecord.RecApply drts dlts ( Tuple22C ( ConstC ( Module Double ) ) ( SuperRecord.Has drts ) )
, SuperRecord.RecApply drts dlts ( Tuple22C ( ConstC Semigroup ) ( SuperRecord.Has drts ) )
, SuperRecord.RecApply drts dlts ( Tuple22C ( ConstC Group ) ( SuperRecord.Has drts ) )
, SuperRecord.RecApply drts dlts ( HasDiff' rts )
, SuperRecord.TraversalCHelper dlts rts drts ( HasTorsor rts )
)
=> Proxy# rts -> Proxy# lts -> SomeClassyBrushFieldsList rts lts

View file

@ -28,6 +28,10 @@ import qualified GI.Gtk as GTK
import qualified Control.Concurrent.STM.TVar as STM
( TVar )
-- unordered-containers
import Data.HashMap.Strict
( HashMap )
-- MetaBrush
import Math.Bezier.Cubic.Fit
( FitParameters )
@ -35,6 +39,8 @@ import Math.Vector2D
( Point2D )
import MetaBrush.Asset.Colours
( Colours )
import MetaBrush.Brush
( Brush )
import MetaBrush.Document.Draw
( DrawAnchor )
import MetaBrush.Document.History
@ -73,6 +79,7 @@ data Variables
{ uniqueSupply :: !UniqueSupply
, activeDocumentTVar :: !( STM.TVar ( Maybe Unique ) )
, openDocumentsTVar :: !( STM.TVar ( Map Unique DocumentHistory ) )
, brushesTVar :: !( STM.TVar ( HashMap Brush Unique ) )
, mousePosTVar :: !( STM.TVar ( Maybe ( Point2D Double ) ) )
, mouseHoldTVar :: !( STM.TVar ( Maybe HoldAction ) )
, modifiersTVar :: !( STM.TVar ( Set Modifier ) )

View file

@ -8,6 +8,7 @@
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
@ -26,7 +27,6 @@ module MetaBrush.Document
, emptyDocument
, Stroke(..), StrokeSpline, _strokeSpline, overStrokeSpline
, PointData(..), BrushPointData(..), DiffPointData(..)
, Brush(..), emptyBrush
, FocusState(..), Hoverable(..), HoverContext(..)
, Guide(..)
, _selection, _coords, coords
@ -39,8 +39,6 @@ import Data.Coerce
( coerce )
import Data.Functor.Identity
( Identity(..) )
import Data.Kind
( Type )
import Data.Semigroup
( Arg(..), Min(..), ArgMin )
import GHC.Generics
@ -81,14 +79,10 @@ import Control.Concurrent.STM
-- superrecord
import qualified SuperRecord as Super
( Rec )
import qualified SuperRecord
( Intersect, rnil )
-- text
import Data.Text
( Text )
import qualified Data.Text as Text
( unpack )
-- transformers
import Control.Monad.Trans.Reader
@ -96,7 +90,7 @@ import Control.Monad.Trans.Reader
-- MetaBrush
import Math.Bezier.Spline
( Spline(..), KnownSplineType, Curves(..) )
( Spline(..), KnownSplineType )
import Math.Bezier.Stroke
( CachedStroke )
import Math.Module
@ -107,10 +101,12 @@ import Math.Module
)
import Math.Vector2D
( Point2D(..), Vector2D(..) )
import MetaBrush.Brush
( Brush, BrushReference )
import {-# SOURCE #-} MetaBrush.Document.Serialise
( Serialisable(..) )
import MetaBrush.MetaParameter.AST
( STypesI, Adapted, AdaptableFunction(..), BrushFunction )
( STypesI(..) )
import MetaBrush.MetaParameter.Interpolation
( Interpolatable(..) ) -- + orphan instances
import MetaBrush.UI.Viewport
@ -145,6 +141,7 @@ data Document
, zoomFactor :: !Double
, documentUnique :: Unique
, documentContent :: !DocumentContent
, documentBrushes :: !( Map Unique Brush )
}
deriving stock ( Show, Generic )
deriving anyclass NFData
@ -166,28 +163,23 @@ data Stroke where
Stroke
:: ( KnownSplineType clo
, pointParams ~ Super.Rec pointFields, STypesI pointFields
, brushParams ~ Super.Rec brushFields, STypesI brushFields
, usedParams ~ Super.Rec usedFields , STypesI usedFields
, usedFields ~ ( brushFields `SuperRecord.Intersect` pointFields )
, Show brushParams, NFData brushParams
, Show pointParams, NFData pointParams
, Interpolatable pointParams
, Interpolatable usedParams
, Serialisable pointParams
, Adapted brushFields pointFields usedFields
)
=>
{ strokeName :: Text
, strokeVisible :: !Bool
, strokeUnique :: Unique
, strokeBrush :: Brush brushFields
, strokeSpline :: !( StrokeSpline clo pointParams )
{ strokeName :: !Text
, strokeVisible :: !Bool
, strokeUnique :: Unique
, strokeBrushRef :: !( BrushReference pointFields )
, strokeSpline :: !( StrokeSpline clo pointParams )
}
-> Stroke
deriving stock instance Show Stroke
instance NFData Stroke where
rnf ( Stroke { strokeName, strokeVisible, strokeUnique, strokeSpline } )
rnf ( Stroke { strokeName, strokeVisible, strokeUnique, strokeBrushRef, strokeSpline } )
= deepseq strokeSpline
. deepseq strokeBrushRef
. deepseq strokeUnique
. deepseq strokeVisible
$ rnf strokeName
@ -224,21 +216,6 @@ overStrokeSpline
overStrokeSpline f = coerce ( _strokeSpline @Identity ( coerce . f ) )
data Brush ( brushFields :: [ Type ] )
= BrushData
{ brushName :: !Text
, brushCode :: !Text
, brushFunction :: !( BrushFunction brushFields )
}
instance Show ( Brush brushFields ) where
show ( BrushData { brushName } ) = Text.unpack brushName
-- Brush parameters using open records.
emptyBrush :: Brush '[]
emptyBrush = BrushData "Empty brush" ""
( AdaptableFunction ( const SuperRecord.rnil, const $ Spline ( Point2D 0 0 ) NoCurves ) )
data PointData params
= PointData
{ pointCoords :: !( Point2D Double )
@ -285,6 +262,7 @@ emptyDocument docName unique =
, strokes = []
, guides = Map.empty
}
, documentBrushes = Map.empty
}
--------------------------------------------------------------------------------

View file

@ -65,10 +65,11 @@ import Math.Module
( squaredNorm )
import Math.Vector2D
( Point2D(..), Vector2D(..) )
import MetaBrush.Brush
( BrushReference(NoBrush) )
import MetaBrush.Document
( Document(..), Stroke(..), StrokeSpline
, FocusState(..), PointData(..)
, emptyBrush
, _selection, _strokeSpline
, coords, overStrokeSpline
)
@ -118,11 +119,11 @@ getOrCreateDrawAnchor uniqueSupply c doc@( Document { zoomFactor } ) =
newStroke :: Stroke
newStroke =
Stroke
{ strokeName = "Stroke " <> uniqueText uniq
, strokeVisible = True
, strokeUnique = uniq
, strokeSpline = newSpline
, strokeBrush = emptyBrush
{ strokeName = "Stroke " <> uniqueText uniq
, strokeVisible = True
, strokeUnique = uniq
, strokeSpline = newSpline
, strokeBrushRef = NoBrush
}
newDoc' :: Document
newDoc'

View file

@ -1,6 +1,4 @@
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE BlockArguments #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
@ -12,12 +10,9 @@
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fplugin GHC.TypeLits.KnownNat.Solver #-}
module MetaBrush.Document.Serialise
( Serialisable(..)
, documentToJSON, documentFromJSON
@ -27,7 +22,7 @@ module MetaBrush.Document.Serialise
-- base
import Control.Arrow
( (&&&) )
( (&&&), first )
import Control.Monad
( unless )
import qualified Data.Bifunctor as Bifunctor
@ -38,22 +33,12 @@ import Data.Functor.Contravariant
( contramap )
import Data.Functor.Identity
( Identity(..) )
import Data.Kind
( Type )
import Data.List
( sortBy )
import Data.Ord
( comparing )
import Data.Proxy
( Proxy(Proxy) )
import Data.Type.Equality
( (:~:)(Refl) )
import Data.Typeable
( Typeable, eqT )
import GHC.Exts
( Proxy#, proxy# )
import GHC.TypeLits
( symbolVal', KnownSymbol, SomeSymbol(..), someSymbolVal, sameSymbol )
( symbolVal', KnownSymbol )
import GHC.TypeNats
( KnownNat )
import Unsafe.Coerce
@ -77,16 +62,12 @@ import qualified Data.ByteString.Lazy.Builder as Lazy.ByteString.Builder
import Data.Map.Strict
( Map )
import qualified Data.Map.Strict as Map
( fromList, elems )
( elems, empty, fromList, insert, lookup )
import Data.Sequence
( Seq )
import qualified Data.Sequence as Seq
( fromList )
-- deepseq
import Control.DeepSeq
( NFData(..) )
-- directory
import System.Directory
( canonicalizePath, createDirectoryIfMissing, doesFileExist )
@ -99,10 +80,6 @@ import System.FilePath
import Data.Generics.Product.Typed
( HasType(typed) )
-- groups
import Data.Group
( Group )
-- lens
import Control.Lens
( view )
@ -123,13 +100,12 @@ import qualified Control.Concurrent.STM as STM
import qualified SuperRecord as Super
( Rec )
import qualified SuperRecord
( Has, RecTy, (:=), FldProxy(..)
, RecSize, RecApply(..), RecVecIdxPos, UnsafeRecBuild(..)
, TraversalCHelper, RemoveAccessTo, Intersect
( FldProxy(..)
, RecSize, RecApply(..), UnsafeRecBuild(..)
, reflectRec
)
import SuperRecord
( ConstC, Tuple22C )
( ConstC )
-- text
import Data.Text
@ -140,10 +116,18 @@ import qualified Data.Text as Text
-- transformers
import Control.Monad.IO.Class
( MonadIO(liftIO) )
import Control.Monad.Trans.Reader
( runReaderT )
import Control.Monad.Trans.Class
( MonadTrans(lift) )
import Control.Monad.Trans.Reader
( runReaderT )
import Control.Monad.Trans.State.Strict
( StateT, runStateT, get, put )
-- unordered-containers
import Data.HashMap.Strict
( HashMap )
import qualified Data.HashMap.Strict as HashMap
( foldrWithKey', insert, lookup )
-- waargonaut
import qualified Waargonaut.Attoparsec as JSON.Decoder
@ -172,8 +156,12 @@ import qualified Waargonaut.Prettier as JSON
( prettyJson )
import qualified Waargonaut.Prettier as TonyMorris
( Natural )
import qualified Waargonaut.Types.JObject as JSON
( MapLikeObj )
import Waargonaut.Types.Json
( Json )
import qualified Waargonaut.Types.Whitespace as JSON
( WS )
-- MetaBrush
import qualified Math.Bezier.Cubic as Cubic
@ -186,26 +174,25 @@ import Math.Bezier.Spline
)
import Math.Bezier.Stroke
( CachedStroke(..) )
import Math.Module
( Module )
import Math.Vector2D
( Point2D(..), Vector2D(..), Segment )
import MetaBrush.Brush
( Brush(..), BrushReference(..), newBrushReference
, SomeBrushFields(..), SomeFieldSType(..), reflectBrushFieldsNoDups
)
import MetaBrush.Document
( Document(..), DocumentContent(..), Guide(..)
, Stroke(..), StrokeSpline
, PointData(..), Brush(..) , FocusState(..)
, PointData(..), FocusState(..)
)
import MetaBrush.MetaParameter.AST
( SType(..), STypeI(..), STypes(..), STypesI(..)
( SType(..), STypeI(..), STypesI(..)
, SomeSType(..), someSTypes
, Adapted, AdaptableFunction(..), BrushFunction
, MapFields, UniqueField, UseFieldsInBrush
, AdaptableFunction(..), BrushFunction
, eqTy, eqTys
)
import MetaBrush.MetaParameter.Driver
( SomeBrushFunction(..), interpretBrush )
import MetaBrush.MetaParameter.Interpolation
( Interpolatable(..), MapDiff, HasDiff', HasTorsor )
import MetaBrush.Unique
( Unique, UniqueSupply, freshUnique )
@ -226,9 +213,16 @@ documentToJSON
four = unsafeCoerce ( 4 :: Integer )
-- | Parse a document from JSON (given by a strict bytestring).
documentFromJSON :: UniqueSupply -> Maybe FilePath -> Strict.ByteString -> IO ( Either JSON.DecodeError Document )
documentFromJSON uniqueSupply mfp
= fmap ( Bifunctor.first fst )
--
-- Updates the store of brushes by adding any new brushes contained in the document.
documentFromJSON
:: UniqueSupply -> HashMap Brush Unique
-> Maybe FilePath
-> Strict.ByteString
-> IO ( Either JSON.DecodeError Document, HashMap Brush Unique )
documentFromJSON uniqueSupply brushUniques mfp
= fmap ( first $ Bifunctor.first fst )
. ( `runStateT` brushUniques )
. JSON.Decoder.decodeAttoparsecByteString ( decodeDocument uniqueSupply mfp )
--------------------------------------------------------------------------------
@ -247,12 +241,14 @@ saveDocument path doc = do
atomicReplaceFile Nothing path' ( documentToJSON doc )
-- | Load a MetaBrush document.
loadDocument :: UniqueSupply -> FilePath -> IO ( Either String Document )
loadDocument uniqueSupply fp = do
--
-- Updates the store of brushes by adding any new brushes contained in the document.
loadDocument :: UniqueSupply -> HashMap Brush Unique -> FilePath -> IO ( Either String Document, HashMap Brush Unique )
loadDocument uniqueSupply brushUniques fp = do
exists <- doesFileExist fp
if exists
then Bifunctor.first show <$> ( documentFromJSON uniqueSupply ( Just fp ) =<< Strict.ByteString.readFile fp )
else pure ( Left $ "No file at " <> fp )
then first ( Bifunctor.first show ) <$> ( documentFromJSON uniqueSupply brushUniques ( Just fp ) =<< Strict.ByteString.readFile fp )
else pure ( Left $ "No file at " <> fp, brushUniques )
--------------------------------------------------------------------------------
@ -348,21 +344,21 @@ encodeCurve
encodeCurve encodePtData = case ssplineType @clo of
SOpen -> JSON.Encoder.mapLikeObj \case
LineTo ( NextPoint p1 ) _ ->
JSON.Encoder.atKey' "p1" encodePtData p1
JSON.Encoder.atKey' "p1" encodePtData p1
Bezier2To p1 ( NextPoint p2 ) _ ->
JSON.Encoder.atKey' "p1" encodePtData p1
. JSON.Encoder.atKey' "p2" encodePtData p2
JSON.Encoder.atKey' "p1" encodePtData p1
. JSON.Encoder.atKey' "p2" encodePtData p2
Bezier3To p1 p2 ( NextPoint p3 ) _ ->
JSON.Encoder.atKey' "p1" encodePtData p1
. JSON.Encoder.atKey' "p2" encodePtData p2
. JSON.Encoder.atKey' "p3" encodePtData p3
JSON.Encoder.atKey' "p1" encodePtData p1
. JSON.Encoder.atKey' "p2" encodePtData p2
. JSON.Encoder.atKey' "p3" encodePtData p3
SClosed -> JSON.Encoder.mapLikeObj \case
LineTo BackToStart _ -> id
Bezier2To p1 BackToStart _ ->
JSON.Encoder.atKey' "p1" encodePtData p1
JSON.Encoder.atKey' "p1" encodePtData p1
Bezier3To p1 p2 BackToStart _ ->
JSON.Encoder.atKey' "p1" encodePtData p1
. JSON.Encoder.atKey' "p2" encodePtData p2
JSON.Encoder.atKey' "p1" encodePtData p1
. JSON.Encoder.atKey' "p2" encodePtData p2
decodeCurve
:: forall clo ptData m
@ -551,6 +547,7 @@ encodeSomeSType = JSON.Encoder.mapLikeObj \ ( SomeSType ( _ :: Proxy# ty ) ) ->
-> JSON.Encoder.atKey' "tag" JSON.Encoder.text "adaptableFun"
. JSON.Encoder.atKey' "fields" encodeFieldTypes ( someSTypes @kvs )
. JSON.Encoder.atKey' "res" encodeSomeSType ( SomeSType ( proxy# :: Proxy# res ) )
{-
decodeSomeSType :: Monad m => JSON.Decoder m SomeSType
decodeSomeSType = do
@ -583,32 +580,33 @@ decodeSomeSType = do
( SomeBrushFields ( _ :: Proxy# kvs ) ) <- JSON.Decoder.atKey "fields" decodeFieldTypes
( SomeSType ( _ :: Proxy# a ) ) <- JSON.Decoder.atKey "res" decodeSomeSType
pure ( SomeSType ( proxy# :: Proxy# ( AdaptableFunction kvs a ) ) )
_ -> throwError ( JSON.ParseFailed $ "Unsupported record field type with tag " <> tag )
-}
decodeSomeFieldSType :: Monad m => JSON.Decoder m SomeFieldSType
decodeSomeFieldSType = do
tag <- JSON.Decoder.atKey "tag" JSON.Decoder.text
tag <- JSON.Decoder.atKey "tag" JSON.Decoder.text
case tag of
"double" -> pure ( SomeFieldSType ( proxy# :: Proxy# Double ) )
"point" -> do
( SomeFieldSType ( _ :: Proxy# a ) ) <- JSON.Decoder.atKey "coords" decodeSomeFieldSType
case eqT @a @Double of
case eqTy @a @Double of
Just Refl -> pure ( SomeFieldSType ( proxy# :: Proxy# ( Point2D Double ) ) )
Nothing -> throwError ( JSON.ParseFailed "Point2D: non-Double coordinate type" )
_ -> throwError ( JSON.ParseFailed $ "Unsupported record field type with tag " <> tag )
encodeFieldTypes :: Monad f => JSON.Encoder f ( [ ( Text, SomeSType ) ] )
encodeFieldTypes :: Monad f => JSON.Encoder f [ ( Text, SomeSType ) ]
encodeFieldTypes = JSON.Encoder.keyValueTupleFoldable encodeSomeSType
decodeFieldTypes :: Monad m => JSON.Decoder m SomeBrushFields
decodeFieldTypes = do
fields <- JSON.Decoder.objectAsKeyValues JSON.Decoder.text decodeSomeFieldSType
let
sortedFields :: [ ( Text, SomeFieldSType ) ]
sortedFields = sortBy ( comparing fst ) fields
duplicates :: [ Text ]
duplicates = duplicatesAcc [] [] sortedFields
duplicatesAcc :: [ Text ] -> [ Text ] -> [ ( Text, SomeFieldSType ) ] -> [ Text ]
duplicates = duplicatesAcc [] [] fields
duplicatesAcc :: [ Text ] -> [ Text ] -> [ ( Text, whatever ) ] -> [ Text ]
duplicatesAcc _ dups [] = dups
duplicatesAcc seen dups ( ( k, _ ) : kvs )
| k `elem` seen
@ -616,89 +614,37 @@ decodeFieldTypes = do
| otherwise
= duplicatesAcc ( k : seen ) dups kvs
case duplicates of
[] -> pure ( mkBrushFields sortedFields )
[] -> pure ( reflectBrushFieldsNoDups fields )
[dup] -> throwError ( JSON.ParseFailed $ "Duplicate field name " <> dup <> " in brush record type" )
dups -> throwError ( JSON.ParseFailed $ "Duplicate field names in brush record type:\n" <> Text.unwords dups )
where
mkBrushFields :: [ ( Text, SomeFieldSType ) ] -> SomeBrushFields
mkBrushFields = fromSomeBrushFieldsList . mkBrushFieldsList
mkBrushFieldsList :: [ ( Text, SomeFieldSType ) ] -> SomeBrushFieldsList
mkBrushFieldsList [] = SomeBrushFieldsList NilFields
mkBrushFieldsList ( ( k, SomeFieldSType ( _ :: Proxy# v ) ) : kvs )
| SomeBrushFieldsList ( kvs_list :: BrushFieldsList kvs ) <- mkBrushFieldsList kvs
, SomeSymbol ( _ :: Proxy k ) <- someSymbolVal ( Text.unpack k )
-- deduce RecSize ( MapDiff kvs ) ~ RecSize kvs
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize ( MapDiff kvs ) :~: SuperRecord.RecSize kvs )
-- compute indexing into record list (with SuperRecord, the index is the number of fields remaining)
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k ( ( k SuperRecord.:= v ) : kvs ) :~: SuperRecord.RecSize kvs )
= SomeBrushFieldsList ( ConsFields ( proxy# :: Proxy# k ) ( proxy# :: Proxy# v ) kvs_list )
fromSomeBrushFieldsList :: SomeBrushFieldsList -> SomeBrushFields
fromSomeBrushFieldsList ( SomeBrushFieldsList ( kvs :: BrushFieldsList kvs ) ) = case go @kvs kvs of
SomeClassyBrushFieldsList ( _ :: Proxy# kvs ) ( _ :: Proxy# kvs ) ->
SomeBrushFields ( proxy# :: Proxy# kvs )
where
go :: forall ( rts :: [ Type ] ) ( lts :: [ Type ] )
. ( STypesI rts, KnownNat ( SuperRecord.RecSize rts ), KnownNat ( SuperRecord.RecSize ( MapDiff rts ) ) )
=> BrushFieldsList lts -> SomeClassyBrushFieldsList rts lts
go NilFields =
SomeClassyBrushFieldsList ( proxy# :: Proxy# rts ) ( proxy# :: Proxy# '[] )
go ( ConsFields ( _ :: Proxy# k ) ( _ :: Proxy# a ) kvs' )
| ( SomeClassyBrushFieldsList _ ( _ :: Proxy# lts' ) ) <- go @rts kvs'
-- Assert some facts that result from the field names being distinct:
-- - current field name does not re-occur later on
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k lts' :~: lts' )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k ( MapDiff lts' ) :~: MapDiff lts' )
-- - looking up the type associated with the current field name returns the current type
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k rts :~: Just a )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k ( MapDiff rts ) :~: Just ( Diff a ) )
-- - MapDiff preserves length
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize ( MapDiff lts' ) :~: SuperRecord.RecSize lts' )
-- - compute the index (which is the number of fields remaining, i.e. the indexing starts counting from 0 from the right)
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k rts :~: SuperRecord.RecSize lts' )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k ( MapDiff rts ) :~: SuperRecord.RecSize lts' )
= SomeClassyBrushFieldsList ( proxy# :: Proxy# rts ) ( proxy# :: Proxy# ( ( k SuperRecord.:= a ) ': lts' ) )
encodeBrush :: Applicative f => JSON.Encoder f ( Brush brushParams )
encodeBrush = JSON.Encoder.mapLikeObj \ ( BrushData { brushName, brushCode } ) ->
encodeBrush :: Applicative f => JSON.Encoder f Brush
encodeBrush = JSON.Encoder.mapLikeObj
\ ( BrushData { brushName, brushCode } ) ->
JSON.Encoder.atKey' "name" JSON.Encoder.text brushName
. JSON.Encoder.atKey' "code" JSON.Encoder.text brushCode
decodeBrush
:: forall m flds. ( MonadIO m, STypesI flds )
=> UniqueSupply
-> JSON.Decoder m ( Brush flds )
decodeBrush :: MonadIO m => UniqueSupply -> JSON.Decoder m Brush
decodeBrush uniqSupply = do
brushName <- JSON.Decoder.atKey "name" JSON.Decoder.text
brushCode <- JSON.Decoder.atKey "code" JSON.Decoder.text
( mbBrush, _ ) <- lift ( liftIO $ interpretBrush uniqSupply brushCode )
case mbBrush of
Left err -> throwError ( JSON.ParseFailed ( "Failed to interpret brush code:\n" <> ( Text.pack $ show err ) ) )
Right ( SomeBrushFunction ( brushFunction :: BrushFunction brushParams ) ) ->
case eqTys @flds @brushParams of
Just Refl -> pure ( BrushData { brushName, brushCode, brushFunction } )
Nothing ->
throwError
( JSON.ParseFailed $
"Brush has unexpected input record type:\n\
\Expected: " <> Text.pack ( show ( sTypesI @flds ) ) <> "\n\
\ Actual: " <> Text.pack ( show ( sTypesI @brushParams ) )
)
Right ( SomeBrushFunction brushFunction ) ->
pure ( BrushData { brushName, brushCode, brushFunction } )
encodeStroke :: Monad f => JSON.Encoder f Stroke
encodeStroke = JSON.Encoder.mapLikeObj
encodeStroke :: Monad f => Map Unique Brush -> JSON.Encoder f Stroke
encodeStroke brushes = JSON.Encoder.mapLikeObj
\ ( Stroke
{ strokeName
, strokeVisible
, strokeSpline = strokeSpline :: StrokeSpline clo ( Super.Rec pointFields )
, strokeBrush = strokeBrush :: Brush brushFields
, strokeBrushRef
}
) ->
let
@ -706,38 +652,58 @@ encodeStroke = JSON.Encoder.mapLikeObj
closed = case ssplineType @clo of
SClosed -> True
SOpen -> False
mbEncodeBrush :: JSON.MapLikeObj JSON.WS Json -> JSON.MapLikeObj JSON.WS Json
mbEncodeBrush = case strokeBrushRef of
BrushReference ( _ :: Proxy# brushFields1 ) unique ->
case Map.lookup unique brushes of
Nothing -> error ( "encodeStroke: no brush with unique " <> show unique <> " in environment" )
Just ( brush@BrushData { brushName, brushFunction = ( _ :: BrushFunction brushFields2 ) } ) ->
case eqTys @brushFields1 @brushFields2 of
Nothing -> error
( "encodeStroke: brush '" <> Text.unpack brushName <> "' has unexpected field types.\n\
\Expected: " <> show ( sTypesI @brushFields1 ) <> "\n\
\ Actual: " <> show ( sTypesI @brushFields2 )
)
Just Refl -> JSON.Encoder.atKey' "brush" encodeBrush brush
NoBrush -> id
in
JSON.Encoder.atKey' "name" JSON.Encoder.text strokeName
. JSON.Encoder.atKey' "visible" JSON.Encoder.bool strokeVisible
. JSON.Encoder.atKey' "closed" JSON.Encoder.bool closed
. JSON.Encoder.atKey' "brushFields" encodeFieldTypes ( someSTypes @brushFields )
. JSON.Encoder.atKey' "pointFields" encodeFieldTypes ( someSTypes @pointFields )
. JSON.Encoder.atKey' "usedFields" encodeFieldTypes ( someSTypes @( brushFields `SuperRecord.Intersect` pointFields ) )
. JSON.Encoder.atKey' "brush" encodeBrush strokeBrush
. mbEncodeBrush
. JSON.Encoder.atKey' "spline" ( encodeSpline encodePointData ) strokeSpline
decodeStroke :: forall m. MonadIO m => UniqueSupply -> JSON.Decoder m Stroke
decodeStroke :: forall m. MonadIO m => UniqueSupply -> JSON.Decoder ( StateT ( HashMap Brush Unique ) m ) Stroke
decodeStroke uniqueSupply = do
brushHashMap <- lift get
strokeName <- JSON.Decoder.atKey "name" JSON.Decoder.text
strokeVisible <- JSON.Decoder.atKey "visible" JSON.Decoder.bool
strokeUnique <- lift ( liftIO . STM.atomically $ runReaderT freshUnique uniqueSupply )
strokeClosed <- JSON.Decoder.atKey "closed" JSON.Decoder.bool
SomeBrushFields ( _ :: Proxy# brushFields ) <- JSON.Decoder.atKey "brushFields" decodeFieldTypes
SomeBrushFields ( _ :: Proxy# pointFields ) <- JSON.Decoder.atKey "pointFields" decodeFieldTypes
SomeBrushFields ( _ :: Proxy# usedFields ) <- JSON.Decoder.atKey "usedFields" decodeFieldTypes
strokeBrush <- JSON.Decoder.atKey "brush" ( decodeBrush @m @brushFields uniqueSupply )
case proveAdapted @brushFields @pointFields @usedFields of
Nothing -> throwError ( JSON.ParseFailed "Stroke: 'usedFields' is not equal to 'brushFields `Intersect` pointFields'" )
Just Dict ->
case strokeClosed of
True -> do
strokeSpline <- JSON.Decoder.atKey "spline" ( decodeSpline @Closed @( PointData ( Super.Rec pointFields ) ) decodePointData )
pure ( Stroke { strokeName, strokeVisible, strokeUnique, strokeSpline, strokeBrush } )
False -> do
strokeSpline <- JSON.Decoder.atKey "spline" ( decodeSpline @Open @( PointData ( Super.Rec pointFields ) ) decodePointData )
pure ( Stroke { strokeName, strokeVisible, strokeUnique, strokeSpline, strokeBrush } )
mbBrush <- JSON.Decoder.atKeyOptional "brush" ( decodeBrush uniqueSupply )
strokeBrushRef <-
case mbBrush of
Nothing -> pure NoBrush
Just ( brush@BrushData { brushFunction = _ :: BrushFunction brushFields } ) -> do
brushUnique <-
case HashMap.lookup brush brushHashMap of
Nothing -> do
brushUnique <- lift ( liftIO . STM.atomically $ runReaderT freshUnique uniqueSupply )
lift $ put ( HashMap.insert brush brushUnique brushHashMap )
pure brushUnique
Just brushUnique -> pure brushUnique
pure ( newBrushReference @brushFields brushUnique )
if strokeClosed
then do
strokeSpline <- JSON.Decoder.atKey "spline" ( decodeSpline @Closed @( PointData ( Super.Rec pointFields ) ) decodePointData )
pure ( Stroke { strokeName, strokeVisible, strokeUnique, strokeSpline, strokeBrushRef } )
else do
strokeSpline <- JSON.Decoder.atKey "spline" ( decodeSpline @Open @( PointData ( Super.Rec pointFields ) ) decodePointData )
pure ( Stroke { strokeName, strokeVisible, strokeUnique, strokeSpline, strokeBrushRef } )
encodeGuide :: Applicative f => JSON.Encoder f Guide
encodeGuide = JSON.Encoder.mapLikeObj \ ( Guide { guidePoint, guideNormal } ) ->
@ -756,12 +722,15 @@ decodeGuide uniqueSupply = do
encodeDocumentContent :: Applicative f => JSON.Encoder f DocumentContent
encodeDocumentContent = JSON.Encoder.mapLikeObj \ ( Content { guides, strokes } ) ->
JSON.Encoder.atKey' "guides" ( encodeUniqueMap encodeGuide ) guides
. JSON.Encoder.atKey' "strokes" ( JSON.Encoder.list encodeStroke ) strokes
encodeDocumentContent :: Applicative f => Map Unique Brush -> JSON.Encoder f DocumentContent
encodeDocumentContent brushes = JSON.Encoder.mapLikeObj \ ( Content { guides, strokes } ) ->
JSON.Encoder.atKey' "guides" ( encodeUniqueMap encodeGuide ) guides
. JSON.Encoder.atKey' "strokes" ( JSON.Encoder.list $ encodeStroke brushes ) strokes
decodeDocumentContent :: MonadIO m => UniqueSupply -> JSON.Decoder m DocumentContent
decodeDocumentContent
:: MonadIO m
=> UniqueSupply
-> JSON.Decoder ( StateT ( HashMap Brush Unique ) m ) DocumentContent
decodeDocumentContent uniqueSupply = do
let
unsavedChanges :: Bool
@ -775,189 +744,22 @@ decodeDocumentContent uniqueSupply = do
encodeDocument :: Applicative f => JSON.Encoder f Document
encodeDocument = JSON.Encoder.mapLikeObj \ ( Document { displayName, viewportCenter, zoomFactor, documentContent } ) ->
JSON.Encoder.atKey' "name" JSON.Encoder.text displayName
. JSON.Encoder.atKey' "center" ( encoder @( Point2D Double ) ) viewportCenter
. JSON.Encoder.atKey' "zoom" ( encoder @Double ) zoomFactor
. JSON.Encoder.atKey' "content" encodeDocumentContent documentContent
encodeDocument = JSON.Encoder.mapLikeObj
\ ( Document { displayName, viewportCenter, zoomFactor, documentContent, documentBrushes } ) ->
JSON.Encoder.atKey' "name" JSON.Encoder.text displayName
. JSON.Encoder.atKey' "center" ( encoder @( Point2D Double ) ) viewportCenter
. JSON.Encoder.atKey' "zoom" ( encoder @Double ) zoomFactor
. JSON.Encoder.atKey' "content" ( encodeDocumentContent documentBrushes ) documentContent
decodeDocument :: MonadIO m => UniqueSupply -> Maybe FilePath -> JSON.Decoder m Document
decodeDocument :: MonadIO m => UniqueSupply -> Maybe FilePath -> JSON.Decoder ( StateT ( HashMap Brush Unique ) m ) Document
decodeDocument uniqueSupply mbFilePath = do
displayName <- JSON.Decoder.atKey "name" JSON.Decoder.text
viewportCenter <- JSON.Decoder.atKey "center" ( decoder @( Point2D Double ) )
zoomFactor <- JSON.Decoder.atKey "zoom" ( decoder @Double )
documentUnique <- lift ( liftIO . STM.atomically $ runReaderT freshUnique uniqueSupply )
documentContent <- JSON.Decoder.atKey "content" ( decodeDocumentContent uniqueSupply )
pure ( Document { displayName, mbFilePath, viewportCenter, zoomFactor, documentUnique, documentContent } )
--------------------------------------------------------------------------------
-- Various auxiliary types.
-- | Existential type over an allowed record field type used in brushes, such as Double and Point2D Double.
data SomeFieldSType where
SomeFieldSType
:: ( STypeI a, Show a, NFData a, Serialisable a, Interpolatable a, Typeable a )
=> Proxy# a -> SomeFieldSType
-- | Existential type for allowed fields of a brush record.
data SomeBrushFields where
SomeBrushFields
:: forall kvs rec
. ( STypesI kvs
, rec ~ Super.Rec kvs
, Show rec, NFData rec
, Interpolatable rec
, Serialisable rec
)
=> Proxy# kvs -> SomeBrushFields
instance Show SomeBrushFields where
show ( SomeBrushFields ( _ :: Proxy# kvs ) ) = show ( sTypesI @kvs )
-- | Auxiliary datatype used to create a proof that record fields have the required instances.
data BrushFieldsList kvs where
NilFields :: BrushFieldsList '[]
ConsFields
::
( KnownSymbol k
, Show a, NFData a, Serialisable a
, Interpolatable a
, STypesI kvs
, KnownNat ( SuperRecord.RecSize kvs )
, SuperRecord.Has ( k SuperRecord.:= a ': kvs ) k a
)
=> Proxy# k -> Proxy# a -> BrushFieldsList kvs -> BrushFieldsList ( k SuperRecord.:= a ': kvs )
-- | Existential type used in the process of proving that record fields have the required instances.
data SomeBrushFieldsList where
SomeBrushFieldsList
:: ( STypesI kvs
, KnownNat ( SuperRecord.RecSize kvs )
, KnownNat ( SuperRecord.RecSize ( MapDiff kvs ) )
)
=> BrushFieldsList kvs -> SomeBrushFieldsList
-- | Type used to backtrack instance resolution in the SuperRecord library,
-- to witness the required typeclass instances by induction on the record fields.
data SomeClassyBrushFieldsList rts lts where
SomeClassyBrushFieldsList
:: forall rts lts drts dlts
. ( drts ~ MapDiff rts
, dlts ~ MapDiff lts
, KnownNat ( SuperRecord.RecSize rts )
, KnownNat ( SuperRecord.RecSize drts )
, SuperRecord.UnsafeRecBuild rts lts ( ConstC Serialisable )
, SuperRecord.UnsafeRecBuild drts dlts ( ConstC ( Module Double ) )
, SuperRecord.UnsafeRecBuild drts dlts ( ConstC Monoid )
, SuperRecord.RecApply rts lts ( ConstC Show )
, SuperRecord.RecApply rts lts ( ConstC NFData )
, SuperRecord.RecApply rts lts ( ConstC Serialisable )
, SuperRecord.RecApply drts dlts ( Tuple22C ( ConstC ( Module Double ) ) ( SuperRecord.Has drts ) )
, SuperRecord.RecApply drts dlts ( Tuple22C ( ConstC Semigroup ) ( SuperRecord.Has drts ) )
, SuperRecord.RecApply drts dlts ( Tuple22C ( ConstC Group ) ( SuperRecord.Has drts ) )
, SuperRecord.RecApply drts dlts ( HasDiff' rts )
, SuperRecord.TraversalCHelper dlts rts drts ( HasTorsor rts )
)
=> Proxy# rts -> Proxy# lts -> SomeClassyBrushFieldsList rts lts
proveAdapted
:: forall brushFields givenFields usedFields
. ( STypesI brushFields, STypesI givenFields, STypesI usedFields )
=> Maybe ( Dict ( Adapted brushFields givenFields usedFields ) )
proveAdapted
| Just Dict <- proveUnsafeRecBuild @usedFields
, Just Dict <- proveRecApply @brushFields ( size @usedFields )
, Refl <- ( unsafeCoerce Refl :: usedFields :~: ( brushFields `SuperRecord.Intersect` givenFields ) )
= Just Dict
| otherwise
= Nothing
where
-- Provide evidence that each field of "used" appears in "given".
proveUnsafeRecBuild
:: forall lts_used
. ( STypesI lts_used )
=> Maybe ( Dict ( SuperRecord.UnsafeRecBuild usedFields lts_used ( SuperRecord.Has givenFields ) ) )
proveUnsafeRecBuild = case sTypesI @lts_used of
STyNil -> Just Dict
sTyCons@STyCons
| ( _ :: STypes ( k SuperRecord.:= v ': tail_lts_used ) ) <- sTyCons
, SomeIndex ( _ :: Proxy# i ) <- lookupIndex @k @v @givenFields
, Just Dict <- proveUnsafeRecBuild @tail_lts_used
-> Just Dict
| otherwise
-> Nothing
-- Provide evidence whether each field of "brush" appears in "used" or not.
-- Additionally checks that "used" is a subset of "brush".
proveRecApply
:: forall lts_brush
. ( STypesI lts_brush )
=> Int
-> Maybe ( Dict ( SuperRecord.RecApply ( MapFields UniqueField brushFields ) ( MapFields UniqueField lts_brush ) ( UseFieldsInBrush usedFields ) ) )
proveRecApply nbUnseen = case sTypesI @lts_brush of
STyNil -> if nbUnseen < 1 then Just Dict else Nothing
sTyCons@STyCons
| ( _ :: STypes ( k SuperRecord.:= v ': tail_lts_brush ) ) <- sTyCons
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize ( MapFields UniqueField brushFields ) :~: SuperRecord.RecSize brushFields )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k ( MapFields UniqueField brushFields ) :~: SuperRecord.RecSize tail_lts_brush )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k ( MapFields UniqueField brushFields ) :~: Just ( UniqueField v ) )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k ( MapFields UniqueField tail_lts_brush ) :~: MapFields UniqueField tail_lts_brush )
-> case lookupIndex @k @v @usedFields of
SomeIndex ( _ :: Proxy# i ) ->
case proveRecApply @tail_lts_brush ( nbUnseen - 1 ) of
Just Dict -> Just Dict
Nothing -> Nothing
NoIndex ->
case proveRecApply @tail_lts_brush nbUnseen of
Just Dict -> Just Dict
Nothing -> Nothing
data LookupResult k v kvs where
NoIndex
:: forall k v kvs
. ( SuperRecord.RecTy k kvs ~ Nothing )
=> LookupResult k v kvs
SomeIndex
:: forall k v kvs i
. ( SuperRecord.RecTy k kvs ~ Just v
, SuperRecord.RecVecIdxPos k kvs ~ i
, KnownNat i
)
=> Proxy# i -> LookupResult k v kvs
lookupIndex
:: forall k v kvs
. ( STypesI kvs, KnownSymbol k, STypeI v )
=> LookupResult k v kvs
lookupIndex = case sTypesI @kvs of
STyNil -> NoIndex
sTyCons@STyCons
| ( _ :: STypes ( ( l SuperRecord.:= w ) ': tail_kvs ) ) <- sTyCons
-> case sameSymbol ( Proxy :: Proxy k ) ( Proxy :: Proxy l ) of
Just Refl
| Just Refl <- eqTy @v @w
, ( index_proxy :: Proxy# i ) <- ( proxy# :: Proxy# ( SuperRecord.RecSize tail_kvs ) )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k kvs :~: i )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k kvs :~: Just v )
-> SomeIndex index_proxy
_ -> case lookupIndex @k @v @tail_kvs of
NoIndex
| Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k kvs :~: Nothing )
-> NoIndex
SomeIndex ( px_j :: Proxy# j )
| Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k kvs :~: Just v )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k kvs :~: j )
-> SomeIndex px_j
size :: forall kvs. STypesI kvs => Int
size = case sTypesI @kvs of
STyNil -> 0
sTyCons@STyCons
| ( _ :: STypes ( head_kvs ': tail_kvs ) ) <- sTyCons
-> 1 + size @tail_kvs
data Dict c where
Dict :: c => Dict c
type family FromJust ( x :: Maybe a ) where
FromJust ( Just a ) = a
brushUniques <- lift get
let
documentBrushes :: Map Unique Brush
documentBrushes = HashMap.foldrWithKey' ( flip Map.insert ) Map.empty brushUniques
pure ( Document { displayName, mbFilePath, viewportCenter, zoomFactor, documentUnique, documentContent, documentBrushes } )

View file

@ -1,15 +1,46 @@
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
module MetaBrush.Document.Serialise
( Serialisable(..) )
where
-- base
import GHC.TypeNats
( KnownNat )
-- superrecord
import qualified SuperRecord as Super
( Rec )
import qualified SuperRecord
( RecApply, RecSize, UnsafeRecBuild )
import SuperRecord
( ConstC )
-- waargonaut
import qualified Waargonaut.Decode as JSON
( Decoder )
import qualified Waargonaut.Encode as JSON
( Encoder )
-- MetaBrush
import Math.Vector2D
( Point2D, Vector2D )
--------------------------------------------------------------------------------
class Serialisable a where
encoder :: Monad f => JSON.Encoder f a
decoder :: Monad m => JSON.Decoder m a
instance Serialisable Double
instance Serialisable a => Serialisable ( Point2D a )
instance Serialisable a => Serialisable ( Vector2D a )
instance ( SuperRecord.RecApply flds flds ( ConstC Serialisable )
, SuperRecord.UnsafeRecBuild flds flds ( ConstC Serialisable )
, KnownNat ( SuperRecord.RecSize flds )
)
=> Serialisable ( Super.Rec flds )
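-- Rough reading of the constraints above (a sketch, assuming the usual SuperRecord
-- semantics, since the instance body is not shown in this hunk):
-- 'RecApply flds flds ( ConstC Serialisable )' presumably lets the instance fold over
-- the record with each field's 'Serialisable' instance in scope (encoding),
-- 'UnsafeRecBuild flds flds ( ConstC Serialisable )' lets it rebuild the record field
-- by field (decoding), and 'KnownNat ( SuperRecord.RecSize flds )' sizes the
-- underlying field array.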

View file

@ -50,6 +50,8 @@ import Data.Functor.Identity
( Identity(..) )
import Data.Kind
( Type, Constraint )
import Data.List
( intercalate )
import Data.Proxy
( Proxy(..) )
import Data.Type.Equality
@ -178,22 +180,32 @@ instance ( STypesI kvs, STypeI a ) => STypeI ( AdaptableFunction kvs a ) where
data STypes ( kvs :: [ Type ] ) where
STyNil :: STypes '[]
STyCons :: ( kv ~ ( k SuperRecord.:= v ), KnownSymbol k, STypeI v, STypesI kvs ) => STypes ( kv ': kvs )
deriving stock instance Show ( STypes kvs )
instance Show ( STypes kvs ) where
show sTypes = "'[" <> intercalate "," ( showSTypes sTypes ) <> "]"
showSTypes :: STypes kvs -> [ String ]
showSTypes STyNil = []
showSTypes sTyCons@STyCons
| ( _ :: STypes ( ( k SuperRecord.:= v ) ': tail_kvs ) ) <- sTyCons
= ( symbolVal' ( proxy# :: Proxy# k ) <> " := " <> show ( sTypeI @v ) ) : showSTypes ( sTypesI @tail_kvs )
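-- Example of the resulting output (illustrative; assuming 'show ( sTypeI @Double )'
-- renders as "Double"):
--
--   show ( sTypesI @'[ "r" SuperRecord.:= Double ] ) == "'[r := Double]"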
class KnownNat ( SuperRecord.RecSize kvs ) => STypesI kvs where
sTypesI :: STypes kvs
instance STypesI '[] where
sTypesI = STyNil
-- Warning: this instance is somewhat overly general as it doesn't check that the names are ordered.
-- Warning: this instance is somewhat overly general, as it doesn't check for the absence of duplicate field names.
instance ( kv ~ ( k SuperRecord.:= v ), KnownSymbol k, STypeI v, STypesI kvs ) => STypesI ( kv ': kvs ) where
sTypesI = STyCons
data SomeSType where
SomeSType :: STypeI a => Proxy# a -> SomeSType
instance Show SomeSType where
show ( SomeSType ( _ :: Proxy# a ) ) = show ( sTypeI @a )
instance Eq SomeSType where
( SomeSType ( _ :: Proxy# a ) ) == ( SomeSType ( _ :: Proxy# b ) ) =
case eqTy @a @b of
Just _ -> True
_ -> False
eqSTy :: SType a -> SType b -> Maybe ( a :~: b )
eqSTy sTy_a@SFunTy sTy_b@SFunTy
@ -263,7 +275,10 @@ someSTypes = go ( sTypesI @kvs )
go STyNil = []
go sTyCons@STyCons
| ( _ :: STypes ( ( l SuperRecord.:= v ) ': lvs' ) ) <- sTyCons
= ( Text.pack $ symbolVal' ( proxy# :: Proxy# l ), SomeSType ( proxy# :: Proxy# v ) )
, let
l :: Text
l = Text.pack $ symbolVal' ( proxy# :: Proxy# l )
= ( l, SomeSType ( proxy# :: Proxy# v ) )
: go ( sTypesI @lvs' )
------------------------------------------------

View file

@ -1,6 +1,8 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
@ -12,7 +14,9 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
module MetaBrush.MetaParameter.Interpolation
( Interpolatable(..), MapDiff, HasDiff', HasTorsor )
( Interpolatable(..)
, MapDiff, HasDiff', HasTorsor
)
where
-- base
@ -20,15 +24,23 @@ import Data.Functor.Identity
( Identity(..) )
import Data.Kind
( Type )
import Data.List
( intercalate )
import Data.Monoid
( Sum )
import GHC.Exts
( Proxy#, proxy# )
import GHC.TypeLits
( Symbol )
( Symbol, KnownSymbol, symbolVal' )
-- acts
import Data.Act
( Act(..), Torsor(..) )
-- deepseq
import Control.DeepSeq
( NFData )
-- groups
import Data.Group
( Group(..) )
@ -48,6 +60,8 @@ import Math.Module
( Module(..) )
import Math.Vector2D
( Point2D, Vector2D )
import MetaBrush.MetaParameter.AST
( STypeI(..), STypesI )
--------------------------------------------------------------------------------
@ -78,8 +92,6 @@ type family MapDiff ( kvs :: [ Type ] ) = ( lvs :: [ Type ] ) | lvs -> kvs where
MapDiff ( k SuperRecord.:= v ': kvs ) = ( k SuperRecord.:= Diff v ': MapDiff kvs )
instance ( Monoid ( Super.Rec kvs )
, SuperRecord.RecApply kvs kvs
( Tuple22C ( ConstC Group ) ( SuperRecord.Has kvs ) )

View file

@ -1,3 +1,4 @@
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE BlockArguments #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveTraversable #-}
@ -31,6 +32,10 @@ import Data.Functor.Compose
( Compose(..) )
import Data.Int
( Int32 )
import Data.Maybe
( mapMaybe )
import Data.Type.Equality
( (:~:)(Refl) )
import GHC.Exts
( Proxy#, proxy# )
import GHC.Generics
@ -45,6 +50,10 @@ import Data.Act
)
-- containers
import Data.Map.Strict
( Map )
import qualified Data.Map.Strict as Map
( lookup )
import Data.Sequence
( Seq(..) )
import Data.Set
@ -97,6 +106,8 @@ import Math.Vector2D
( Point2D(..), Vector2D(..) )
import MetaBrush.Asset.Colours
( Colours, ColourRecord(..) )
import MetaBrush.Brush
( Brush(..), BrushReference(..) )
import MetaBrush.Context
( Modifier(..)
, HoldAction(..), PartialPath(..)
@ -107,7 +118,7 @@ import MetaBrush.Document
, Stroke(..), StrokeSpline
, FocusState(..)
, HoverContext(..), Hoverable(..)
, PointData(..), Brush(..), emptyBrush
, PointData(..)
, _selection
, coords
)
@ -118,11 +129,13 @@ import MetaBrush.Document.Serialise
import MetaBrush.Document.Update
( DocChange(..) )
import MetaBrush.MetaParameter.AST
( AdaptableFunction(..), BrushFunction )
( AdaptableFunction(..), BrushFunction, eqTys )
import MetaBrush.MetaParameter.Interpolation
( MapDiff )
import MetaBrush.UI.ToolBar
( Mode(..) )
import MetaBrush.Unique
( Unique )
import MetaBrush.Util
( withRGBA )
@ -159,7 +172,7 @@ renderDocument
renderDocument
cols fitParams mode debug ( viewportWidth, viewportHeight )
modifiers mbMousePos mbHoldEvent mbPartialPath
doc@( Document { viewportCenter = Point2D cx cy, zoomFactor, documentContent = content } )
doc@( Document { viewportCenter = Point2D cx cy, zoomFactor, documentContent = content, documentBrushes } )
= ( mbUpdatedDoc, drawingInstructions )
where
@ -170,7 +183,7 @@ renderDocument
Cairo.translate ( 0.5 + 0.5 * fromIntegral viewportWidth ) ( 0.5 + 0.5 * fromIntegral viewportHeight )
Cairo.scale zoomFactor zoomFactor
Cairo.translate ( -cx ) ( -cy )
for_ strokesWithOutlineInfo
for_ strokesRenderData
( compositeRenders . getCompose . renderStroke cols mbHoverContext mode debug zoomFactor )
renderSelectionRect
Cairo.restore
@ -223,11 +236,11 @@ renderDocument
)
( PointData finalPoint Normal SuperRecord.rnil )
= ( ( Stroke
{ strokeSpline = previewSpline
, strokeVisible = True
, strokeUnique = undefined
, strokeName = undefined
, strokeBrush = emptyBrush
{ strokeSpline = previewSpline
, strokeVisible = True
, strokeUnique = undefined
, strokeName = undefined
, strokeBrushRef = NoBrush
}
)
: strokes content
@ -236,40 +249,8 @@ renderDocument
| otherwise
= ( strokes content, True )
strokesWithOutlineInfo :: [ ( Stroke, Maybe ( Either ( SplinePts Closed ) ( SplinePts Closed, SplinePts Closed ), Seq FitPoint ) ) ]
strokesWithOutlineInfo =
fmap
( \ stroke@(
Stroke
{ strokeSpline = spline :: StrokeSpline clo ( Super.Rec pointFields )
, strokeBrush =
strokeBrush@(
BrushData { brushFunction = ( AdaptableFunction brushFn ) :: BrushFunction brushFields }
)
, ..
} ) ->
if strokeVisible
then
case ( proxy# :: Proxy# ( brushFields `SuperRecord.Intersect` pointFields ) ) of
( _ :: Proxy# usedFields ) ->
let
-- Get the adaptable brush shape (function),
-- specialising it to the type we are using.
toUsedParams :: Super.Rec pointFields -> Super.Rec usedFields
brushShapeFn :: Super.Rec usedFields -> SplinePts Closed
( toUsedParams, brushShapeFn ) = brushFn @pointFields @usedFields
-- Compute the outline using the brush function.
newSpline :: Spline clo CachedStroke ( PointData ( Super.Rec pointFields ) )
outline :: Either ( SplinePts Closed ) ( SplinePts Closed, SplinePts Closed )
fitPts :: Seq FitPoint
( newSpline, outline, fitPts ) =
computeStrokeOutline @( Super.Rec ( MapDiff usedFields ) ) @clo @( Super.Rec usedFields )
fitParams ( toUsedParams . brushParams ) brushShapeFn spline
in ( Stroke { strokeSpline = newSpline, .. } , Just ( outline, fitPts ) )
else ( stroke , Nothing )
)
modifiedStrokes
strokesRenderData :: [ StrokeRenderData ]
strokesRenderData = mapMaybe ( strokeRenderData fitParams documentBrushes ) modifiedStrokes
mbUpdatedDoc :: Maybe Document
mbUpdatedDoc
@ -284,33 +265,83 @@ renderDocument
| otherwise
= Nothing -- TODO: update the original document in this case too (by undoing the modifications)
-- | Utility type to gather the information needed to render a stroke.
--
-- - No outline: just the underlying spline.
-- - Outline: also keeps track of the computed outline and the function that returns the brush shape.
data StrokeRenderData where
StrokeRenderData
:: forall pointParams clo
. ( KnownSplineType clo, Show pointParams )
=> { strokeDataSpline :: !( StrokeSpline clo pointParams ) }
-> StrokeRenderData
StrokeWithOutlineRenderData
:: forall pointParams clo
. ( KnownSplineType clo, Show pointParams )
=> { strokeDataSpline :: !( StrokeSpline clo pointParams )
, strokeOutlineData :: !( Either
( SplinePts Closed )
( SplinePts Closed, SplinePts Closed )
, Seq FitPoint
)
, strokeBrushFunction :: pointParams -> SplinePts Closed
}
-> StrokeRenderData
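-- Note: 'clo' and 'pointParams' are existentially quantified in both constructors,
-- so strokes with different spline closedness and different point-parameter records
-- can all be collected in a single list (as in 'strokesRenderData' above).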
-- | Compute the data necessary to render a stroke.
--
-- - If the stroke has an associated brush, this consists of:
--
--     - the path that the brush follows,
--     - the computed outline (using the fitting algorithm),
--     - the brush shape function.
--
-- - Otherwise, this consists of the underlying spline path only.
strokeRenderData :: FitParameters -> Map Unique Brush -> Stroke -> Maybe StrokeRenderData
strokeRenderData fitParams brushes ( Stroke { strokeSpline = spline :: StrokeSpline clo ( Super.Rec pointFields ), .. } ) =
if strokeVisible
then case strokeBrushRef of
BrushReference ( _ :: Proxy# brushFields ) brushUnique
-- TODO: could emit a warning if the following lookup fails.
| Just ( BrushData { brushFunction = AdaptableFunction brushFn :: BrushFunction brushFields' } ) <- Map.lookup brushUnique brushes
-- TODO: the following check could be skipped if we are feeling confident.
, Just Refl <- eqTys @brushFields @brushFields' -- Refl <- ( unsafeCoerce Refl :: brushFields :~: brushFields' )
, ( _ :: Proxy# usedFields ) <- ( proxy# :: Proxy# ( brushFields `SuperRecord.Intersect` pointFields ) )
, let
-- Get the adaptable brush shape (function),
-- specialising it to the type we are using.
toUsedParams :: Super.Rec pointFields -> Super.Rec usedFields
brushShapeFn :: Super.Rec usedFields -> SplinePts Closed
( toUsedParams, brushShapeFn ) = brushFn @pointFields @usedFields
-- Compute the outline using the brush function.
newSpline :: Spline clo CachedStroke ( PointData ( Super.Rec pointFields ) )
outline :: Either ( SplinePts Closed ) ( SplinePts Closed, SplinePts Closed )
fitPts :: Seq FitPoint
( newSpline, outline, fitPts ) =
computeStrokeOutline @( Super.Rec ( MapDiff usedFields ) ) @clo @( Super.Rec usedFields )
fitParams ( toUsedParams . brushParams ) brushShapeFn spline
-> Just $
StrokeWithOutlineRenderData
{ strokeDataSpline = newSpline
, strokeOutlineData = ( outline, fitPts )
, strokeBrushFunction = brushShapeFn . toUsedParams
}
_ -> Just $
StrokeRenderData
{ strokeDataSpline = spline }
else Nothing
renderStroke
:: Colours -> Maybe HoverContext -> Mode -> Bool -> Double
-> ( Stroke, Maybe ( Either ( SplinePts Closed ) ( SplinePts Closed, SplinePts Closed ), Seq FitPoint ) )
-> StrokeRenderData
-> Compose Renders Cairo.Render ()
renderStroke cols@( Colours { brush } ) mbHoverContext mode debug zoom
( Stroke
{ strokeSpline = strokeSpline :: StrokeSpline clo ( Super.Rec pointFields )
, strokeVisible
, strokeBrush = BrushData { brushFunction = ( AdaptableFunction brushFn ) :: BrushFunction brushFields }
}
, mbOutlineData )
| strokeVisible
, ( _ :: Proxy# usedFields ) <- proxy# :: Proxy# ( brushFields `SuperRecord.Intersect` pointFields )
, let
-- Get the adaptable brush shape (function),
-- specialising it to the type we are using.
toUsedParams :: Super.Rec pointFields -> Super.Rec usedFields
brushShapeFn :: Super.Rec usedFields -> SplinePts Closed
( toUsedParams, brushShapeFn ) = brushFn @pointFields @usedFields
= renderStrokeSpline cols mode mbHoverContext zoom
( when ( mode == BrushMode ) . renderBrushShape ( cols { path = brush } ) mbHoverContext ( 1.5 * zoom ) ( brushShapeFn . toUsedParams ) )
strokeSpline
*> for_ mbOutlineData \outlineData ->
Compose blank { renderStrokes = drawOutline cols debug zoom outlineData }
| otherwise
= pure ()
renderStroke cols@( Colours { brush } ) mbHoverContext mode debug zoom = \case
StrokeRenderData { strokeDataSpline } ->
renderStrokeSpline cols mode mbHoverContext zoom ( const ( pure () ) ) strokeDataSpline
StrokeWithOutlineRenderData strokeDataSpline strokeOutlineData strokeBrushFunction ->
renderStrokeSpline cols mode mbHoverContext zoom
( when ( mode == BrushMode )
. renderBrushShape ( cols { path = brush } ) mbHoverContext ( 1.5 * zoom ) strokeBrushFunction
)
strokeDataSpline
*> Compose blank { renderStrokes = drawOutline cols debug zoom strokeOutlineData }
-- | Render a sequence of stroke points.
--