implement a basic brush store

* each stroke stores the brushes it uses by reference,
  so that one can interactively edit brushes
  and see the strokes update in real-time
* when writing to a file, we retrieve the brush code
  from the reference and include that information statically
  (a simplified sketch of this scheme is given below)
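
For illustration, a minimal sketch of the scheme described above. It uses simplified stand-in types (a plain Int-backed Unique, and Maybe Unique instead of the typed BrushReference) rather than the actual MetaBrush definitions, which live in src/app/MetaBrush/Brush.hs:

    -- Simplified model: strokes refer to brushes by key; the document owns the
    -- brush definitions, so editing a brush updates every referencing stroke,
    -- and saving resolves the reference back to the brush code.
    module BrushStoreSketch where

    import Data.Map.Strict ( Map )
    import qualified Data.Map.Strict as Map
    import Data.Text ( Text )

    newtype Unique = Unique Int
      deriving ( Eq, Ord, Show )

    data Brush = Brush { brushName :: Text, brushCode :: Text }
      deriving Show

    data Stroke = Stroke { strokeName :: Text, strokeBrushRef :: Maybe Unique }
      deriving Show

    data Document = Document
      { strokes         :: [ Stroke ]
      , documentBrushes :: Map Unique Brush
      }
      deriving Show

    -- Interactive editing: adjust the stored brush in place; every stroke that
    -- references this Unique picks up the new definition on the next redraw.
    editBrush :: Unique -> ( Brush -> Brush ) -> Document -> Document
    editBrush u f doc =
      doc { documentBrushes = Map.adjust f u ( documentBrushes doc ) }

    -- Serialisation: resolve the reference so the brush code can be written out
    -- statically next to the stroke that uses it.
    resolveBrush :: Document -> Stroke -> Maybe Brush
    resolveBrush doc stroke =
      strokeBrushRef stroke >>= \ u -> Map.lookup u ( documentBrushes doc )
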
sheaf 2020-11-14 23:32:23 +01:00
parent 393ef6f06e
commit 1e4bb4bddc
13 changed files with 758 additions and 504 deletions


@@ -122,6 +122,7 @@ executable MetaBrush
     , MetaBrush.Asset.TickBox
     , MetaBrush.Asset.Tools
     , MetaBrush.Asset.WindowIcons
+    , MetaBrush.Brush
     , MetaBrush.Context
     , MetaBrush.Document
     , MetaBrush.Document.Draw
@@ -175,6 +176,10 @@ executable MetaBrush
        ^>= 1.4.2.1
     , ghc-typelits-knownnat
        ^>= 0.7.3
+    , gi-cairo-render
+       ^>= 0.0.1
+    , gi-cairo-connector
+       ^>= 0.0.1
     , gi-gdk
        >= 3.0.22 && < 3.1
     , gi-gio
@@ -185,10 +190,10 @@ executable MetaBrush
        ^>= 2.0.24
     , gi-gtk
        >= 3.0.35 && < 3.1
-    , gi-cairo-render
-       ^>= 0.0.1
-    , gi-cairo-connector
-       ^>= 0.0.1
+    , gi-gtksource
+       >= 3.0.23 && < 3.1
+    , hashable
+       ^>= 1.3.0.0
     , haskell-gi-base
        ^>= 0.24.3
     , lens
@@ -207,5 +212,7 @@ executable MetaBrush
        >= 1.2.3.1 && < 1.2.5
     , tree-view
        ^>= 0.5
+    , unordered-containers
+       >= 0.2.11 && < 0.2.14
     , waargonaut
        ^>= 0.8.0.1


@@ -65,6 +65,8 @@ import qualified GI.Gtk as GTK
 -- lens
 import Control.Lens
   ( (.~), set )
+import Control.Lens.At
+  ( at )
 -- stm
 import qualified Control.Concurrent.STM as STM
@@ -82,6 +84,16 @@ import qualified SuperRecord
 import qualified Data.Text as Text
   ( pack )
+-- transformers
+import Control.Monad.Trans.Reader
+  ( runReaderT )
+-- unordered-containers
+import Data.HashMap.Strict
+  ( HashMap )
+import qualified Data.HashMap.Strict as HashMap
+  ( fromList )
 -- MetaBrush
 import Math.Bezier.Cubic.Fit
   ( FitParameters(..) )
@@ -99,6 +111,8 @@ import MetaBrush.Asset.Colours
   ( getColours )
 import MetaBrush.Asset.Logo
   ( drawLogo )
+import MetaBrush.Brush
+  ( Brush, newBrushReference )
 import MetaBrush.Context
   ( UIElements(..), Variables(..)
   , Modifier(..)
@@ -133,7 +147,7 @@ import MetaBrush.UI.Viewport
   ( Viewport(..), Ruler(..), createViewport )
 import MetaBrush.Unique
   ( newUniqueSupply
-  , Unique, unsafeUnique
+  , Unique, freshUnique
   , uniqueMapFromList
   )
 import MetaBrush.Util
@@ -164,17 +178,25 @@ main = do
   uniqueSupply <- newUniqueSupply
   circleBrush <- Asset.Brushes.circle uniqueSupply
+  circleBrushUnique <- runReaderT freshUnique uniqueSupply
+  docUnique <- runReaderT freshUnique uniqueSupply
+  strokeUnique <- runReaderT freshUnique uniqueSupply
   let
+    testBrushes :: HashMap Brush Unique
+    testBrushes = HashMap.fromList
+      [ ( circleBrush, circleBrushUnique ) ]
     testDocuments :: Map Unique DocumentHistory
     testDocuments = fmap newHistory $ uniqueMapFromList
-      [ emptyDocument "Test" ( unsafeUnique 0 )
+      [ emptyDocument "Test" docUnique
        & ( field' @"documentContent" . field' @"strokes" ) .~
          [ Stroke
            { strokeName = "Stroke 1"
            , strokeVisible = True
-            , strokeUnique = unsafeUnique 10
-            , strokeBrush = circleBrush
+            , strokeUnique = strokeUnique
+            , strokeBrushRef = newBrushReference @'[ "r" SuperRecord.:= Double ] circleBrushUnique
            , strokeSpline =
              Spline
                { splineStart = mkPoint ( Point2D 10 -20 ) 2
@@ -186,6 +208,7 @@ main = do
              }
          }
        ]
+      & ( field' @"documentBrushes" . at circleBrushUnique ) .~ ( Just circleBrush )
      ]
   where
     mkPoint :: Point2D Double -> Double -> PointData ( Super.Rec '[ "r" SuperRecord.:= Double ] )
@@ -193,6 +216,7 @@ main = do
   activeDocumentTVar <- STM.newTVarIO @( Maybe Unique ) Nothing
   openDocumentsTVar <- STM.newTVarIO @( Map Unique DocumentHistory ) testDocuments
+  brushesTVar <- STM.newTVarIO @( HashMap Brush Unique ) testBrushes
   mousePosTVar <- STM.newTVarIO @( Maybe ( Point2D Double ) ) Nothing
   mouseHoldTVar <- STM.newTVarIO @( Maybe HoldAction ) Nothing
   modifiersTVar <- STM.newTVarIO @( Set Modifier ) Set.empty


@@ -184,7 +184,9 @@ instance HandleAction OpenFile where
       void $ GTK.nativeDialogRun fileChooser
       filePaths <- GTK.fileChooserGetFilenames fileChooser
       for_ filePaths \ filePath -> do
-        mbDoc <- loadDocument uniqueSupply filePath
+        knownBrushes <- STM.atomically $ STM.readTVar brushesTVar
+        ( mbDoc, knownBrushes' ) <- loadDocument uniqueSupply knownBrushes filePath
+        STM.atomically ( STM.writeTVar brushesTVar knownBrushes' )
        case mbDoc of
          Left errMessage -> warningDialog window filePath errMessage
          Right doc -> do
@@ -233,7 +235,9 @@ instance HandleAction OpenFolder where
       when exists do
         filePaths <- listDirectory folderPath
         for_ filePaths \ filePath -> when ( takeExtension filePath == ".mb" ) do
-          mbDoc <- loadDocument uniqueSupply ( folderPath </> filePath )
+          knownBrushes <- STM.atomically $ STM.readTVar brushesTVar
+          ( mbDoc, knownBrushes' ) <- loadDocument uniqueSupply knownBrushes ( folderPath </> filePath )
+          STM.atomically ( STM.writeTVar brushesTVar knownBrushes' )
          case mbDoc of
            Left errMessage -> warningDialog window filePath errMessage
            Right doc -> do


@@ -9,16 +9,6 @@
 module MetaBrush.Asset.Brushes where
--- base
-import Data.Kind
-  ( Type )
-import Data.Type.Equality
-  ( (:~:)(Refl) )
--- superrecord
-import qualified SuperRecord
-  ( (:=) )
 -- text
 import Data.Text
   ( Text )
@@ -26,10 +16,8 @@ import qualified Data.Text as Text
   ( unpack )
 -- MetaBrush
-import MetaBrush.Document
+import MetaBrush.Brush
   ( Brush(..) )
-import MetaBrush.MetaParameter.AST
-  ( BrushFunction, STypesI(sTypesI), eqTys )
 import MetaBrush.MetaParameter.Driver
   ( SomeBrushFunction(..), interpretBrush )
 import MetaBrush.Unique
@@ -37,11 +25,8 @@ import MetaBrush.Unique
 --------------------------------------------------------------------------------
-circle
-  :: forall circleBrushFields
-  .  ( circleBrushFields ~ '[ "r" SuperRecord.:= Double ] )
-  => UniqueSupply -> IO ( Brush circleBrushFields )
-circle uniqueSupply = mkBrush @circleBrushFields uniqueSupply name code
+circle :: UniqueSupply -> IO Brush
+circle uniqueSupply = mkBrush uniqueSupply name code
   where
     name, code :: Text
     name = "Circle"
@@ -57,11 +42,8 @@ circle uniqueSupply = mkBrush @circleBrushFields uniqueSupply name code
       \ -- (-r ,-r*c) -- (-r*c,-r ) -> ( 0,-r)\n\
      \ -- ( r*c,-r ) -- ( r ,-r*c) -> . ]"
-rounded
-  :: forall roundedBrushFields
-  .  ( roundedBrushFields ~ '[ ] ) -- TODO
-  => UniqueSupply -> IO ( Brush roundedBrushFields )
-rounded uniqueSupply = mkBrush @roundedBrushFields uniqueSupply name code
+rounded :: UniqueSupply -> IO Brush
+rounded uniqueSupply = mkBrush uniqueSupply name code
   where
     name, code :: Text
     name = "Rounded quadrilateral"
@@ -88,21 +70,11 @@ rounded uniqueSupply = mkBrush @roundedBrushFields uniqueSupply name code
 --------------------------------------------------------------------------------
-mkBrush
-  :: forall ( givenBrushFields :: [ Type ] )
-  .  STypesI givenBrushFields
-  => UniqueSupply -> Text -> Text
-  -> IO ( Brush givenBrushFields )
+mkBrush :: UniqueSupply -> Text -> Text -> IO Brush
 mkBrush uniqSupply brushName brushCode = do
   ( mbBrush, _ ) <- interpretBrush uniqSupply brushCode
   case mbBrush of
-    Left err -> error ( "Could not interpret '" <> Text.unpack brushName <> "' brush:\n" <> show err )
-    Right ( SomeBrushFunction ( brushFunction :: BrushFunction inferredBrushFields ) ) ->
-      case eqTys @givenBrushFields @inferredBrushFields of
-        Just Refl -> pure ( BrushData { brushName, brushCode, brushFunction } )
-        Nothing ->
-          error
-            ( "Incorrect record type for '" <> Text.unpack brushName <> "' brush:\n\
-              \Expected: " <> show ( sTypesI @givenBrushFields ) <> "\n\
-              \ Actual: " <> show ( sTypesI @inferredBrushFields )
-            )
+    Left err ->
+      error ( "Could not interpret '" <> Text.unpack brushName <> "' brush:\n" <> show err )
+    Right ( SomeBrushFunction brushFunction ) ->
+      pure ( BrushData { brushName, brushCode, brushFunction } )

src/app/MetaBrush/Brush.hs (new file, 370 additions)

@@ -0,0 +1,370 @@
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fplugin GHC.TypeLits.KnownNat.Solver #-}
module MetaBrush.Brush
( Brush(..), BrushReference(..), newBrushReference
, SomeBrushFields(..), SomeFieldSType(..), reflectBrushFieldsNoDups
)
where
-- base
import Data.Kind
( Type )
import Data.List
( intersect )
import Data.Proxy
( Proxy )
import Data.Type.Equality
( (:~:)(Refl) )
import GHC.Exts
( Proxy#, proxy# )
import GHC.TypeLits
( KnownSymbol, SomeSymbol(..)
, someSymbolVal, symbolVal'
)
import GHC.TypeNats
( KnownNat, SomeNat(..), someNatVal, type (-) )
import Unsafe.Coerce
( unsafeCoerce )
-- deepseq
import Control.DeepSeq
( NFData(..), deepseq )
-- groups
import Data.Group
( Group )
-- hashable
import Data.Hashable
( Hashable(..) )
-- superrecord
import qualified SuperRecord as Super
( Rec )
import qualified SuperRecord
( Has, RecTy, (:=)
, RecSize, RecApply(..), RecVecIdxPos, UnsafeRecBuild(..)
, TraversalCHelper, RemoveAccessTo, Intersect
)
import SuperRecord
( ConstC, Tuple22C )
-- text
import Data.Text
( Text )
import qualified Data.Text as Text
( pack, unpack )
-- unordered-containers
import Data.HashMap.Strict
( HashMap )
import qualified Data.HashMap.Strict as HashMap
( fromList, lookup )
-- MetaBrush
import Math.Module
( Module )
import Math.Vector2D
( Point2D )
import {-# SOURCE #-} MetaBrush.Document.Serialise
( Serialisable )
import MetaBrush.MetaParameter.AST
( SType(..), STypeI(..), SomeSType(..), STypes(..), STypesI(..), someSTypes
, Adapted, BrushFunction
, MapFields, UniqueField, UseFieldsInBrush
)
import MetaBrush.MetaParameter.Interpolation
( Interpolatable(..), MapDiff, HasDiff', HasTorsor )
import MetaBrush.Unique
( Unique )
--------------------------------------------------------------------------------
data Brush where
BrushData
:: forall brushFields
. ( STypesI brushFields )
=>
{ brushName :: !Text
, brushCode :: !Text
, brushFunction :: !( BrushFunction brushFields )
}
-> Brush
instance Show Brush where
show ( BrushData { brushName } ) = Text.unpack brushName
instance NFData Brush where
rnf ( BrushData { brushName, brushCode } )
= deepseq brushCode
$ rnf brushName
instance Eq Brush where
BrushData name1 code1 _ == BrushData name2 code2 _ = name1 == name2 && code1 == code2
instance Ord Brush where
compare ( BrushData name1 code1 _ ) ( BrushData name2 code2 _ ) = compare ( name1, code1 ) ( name2, code2 )
instance Hashable Brush where
hashWithSalt salt ( BrushData { brushName, brushCode } ) =
hashWithSalt ( hashWithSalt salt brushName ) brushCode
data BrushReference pointFields where
NoBrush :: BrushReference pointFields
BrushReference
:: forall brushFields pointFields usedFields brushParams usedParams
. ( brushParams ~ Super.Rec brushFields, STypesI brushFields
, usedParams ~ Super.Rec usedFields
, Interpolatable usedParams
, Adapted brushFields pointFields usedFields
)
=> Proxy# brushFields
-> Unique
-> BrushReference pointFields
instance Show ( BrushReference pointFields ) where
show NoBrush = "NoBrush"
show ( BrushReference ( _ :: Proxy# brushFields ) uniq ) =
"BrushReference @(" <> show ( sTypesI @brushFields ) <> ") " <> show uniq
instance NFData ( BrushReference pointFields ) where
rnf NoBrush = ()
rnf ( BrushReference _ unique ) = rnf unique
newBrushReference
:: forall brushFields pointFields
. ( STypesI brushFields, STypesI pointFields )
=> Unique
-> BrushReference pointFields
newBrushReference uniq = case proveAdapted @brushFields @pointFields of
Dict -> BrushReference ( proxy# :: Proxy# brushFields ) uniq
--------------------------------------------------------------------------------
-- Instance dictionary passing machinery.
data Dict c where
Dict :: c => Dict c
proveAdapted
:: forall brushFields givenFields usedFields drts_used
. ( STypesI brushFields, STypesI givenFields
, usedFields ~ ( brushFields `SuperRecord.Intersect` givenFields )
, drts_used ~ MapDiff usedFields
)
=> Dict ( Adapted brushFields givenFields usedFields
, Interpolatable ( Super.Rec usedFields )
)
proveAdapted = case go ( sTypesI @brushFields ) of { Dict -> Dict }
where
brushFields, givenFields, usedFields :: [ ( Text, SomeSType ) ]
brushFields = someSTypes @brushFields
givenFields = someSTypes @givenFields
usedFields = intersect brushFields givenFields
nbUsedFields :: Int
nbUsedFields = length usedFields
givenIxFieldsMap, usedIxFieldsMap :: HashMap Text Int
givenIxFieldsMap = listToEndIndexMap givenFields
usedIxFieldsMap = listToEndIndexMap usedFields
go :: forall lts_brush lts_used dlts_used
. ( lts_used ~ ( lts_brush `SuperRecord.Intersect` givenFields )
, dlts_used ~ MapDiff lts_used
)
=> STypes lts_brush
-> Dict
( SuperRecord.UnsafeRecBuild usedFields lts_used ( SuperRecord.Has givenFields )
, SuperRecord.RecApply ( MapFields UniqueField brushFields ) ( MapFields UniqueField lts_brush ) ( UseFieldsInBrush usedFields )
, SuperRecord.UnsafeRecBuild drts_used dlts_used ( ConstC Monoid )
, SuperRecord.UnsafeRecBuild drts_used dlts_used ( ConstC ( Module Double ) )
, SuperRecord.RecApply drts_used dlts_used ( Tuple22C ( ConstC ( Module Double ) ) ( SuperRecord.Has drts_used ) )
, SuperRecord.RecApply drts_used dlts_used ( Tuple22C ( ConstC Semigroup ) ( SuperRecord.Has drts_used ) )
, SuperRecord.RecApply drts_used dlts_used ( Tuple22C ( ConstC Group ) ( SuperRecord.Has drts_used ) )
, SuperRecord.RecApply drts_used dlts_used ( HasDiff' usedFields )
, SuperRecord.TraversalCHelper dlts_used usedFields drts_used ( HasTorsor usedFields )
)
go STyNil
| SomeNat ( _ :: Proxy nbUsedFields ) <- someNatVal ( fromIntegral nbUsedFields )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize usedFields :~: nbUsedFields )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize drts_used :~: nbUsedFields )
= Dict
go sTyCons@STyCons
| SomeNat ( _ :: Proxy nbUsedFields ) <- someNatVal ( fromIntegral nbUsedFields )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize usedFields :~: nbUsedFields )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize drts_used :~: nbUsedFields )
, ( _ :: STypes ( ( k SuperRecord.:= v ) ': tail_lts_brush ) ) <- sTyCons
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize ( MapFields UniqueField brushFields ) :~: SuperRecord.RecSize brushFields )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k ( MapFields UniqueField brushFields ) :~: Just ( UniqueField v ) )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k ( MapFields UniqueField tail_lts_brush ) :~: MapFields UniqueField tail_lts_brush )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k ( MapFields UniqueField brushFields ) :~: SuperRecord.RecSize tail_lts_brush )
, let
k :: Text
k = Text.pack ( symbolVal' ( proxy# :: Proxy# k ) )
= case HashMap.lookup k usedIxFieldsMap of
Just k_used_indexFromEnd
| SomeNat ( _ :: Proxy k_used_indexFromEnd ) <- someNatVal ( fromIntegral k_used_indexFromEnd )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k lts_used :~: k_used_indexFromEnd )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k usedFields :~: k_used_indexFromEnd )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k drts_used :~: k_used_indexFromEnd )
, Just k_given_indexFromEnd <- HashMap.lookup k givenIxFieldsMap
, SomeNat ( _ :: Proxy k_given_indexFromEnd ) <- someNatVal ( fromIntegral k_given_indexFromEnd )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k givenFields :~: k_given_indexFromEnd )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k lts_used :~: Just v )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k usedFields :~: Just v )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k givenFields :~: Just v )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k drts_used :~: Just ( Diff v ) )
, ( _ :: Proxy# tail_lts_used ) <- ( proxy# :: Proxy# ( tail_lts_brush `SuperRecord.Intersect` givenFields ) )
, ( _ :: Proxy# tail_dlts_used ) <- ( proxy# :: Proxy# ( MapDiff tail_lts_used ) )
, Refl <- ( unsafeCoerce Refl :: lts_used :~: ( ( k SuperRecord.:= v ) ': tail_lts_used ) )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize tail_lts_used :~: ( SuperRecord.RecSize lts_used - 1 ) )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize tail_dlts_used :~: SuperRecord.RecSize tail_lts_used )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k tail_dlts_used :~: tail_dlts_used )
, Just Dict <- interpolatableDict @v
-> case go ( sTypesI @tail_lts_brush ) of { Dict -> Dict }
_
| Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k lts_used :~: Nothing )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k usedFields :~: Nothing )
, Refl <- ( unsafeCoerce Refl :: lts_used :~: ( tail_lts_brush `SuperRecord.Intersect` givenFields ) )
-> case go ( sTypesI @tail_lts_brush ) of { Dict -> Dict }
interpolatableDict :: forall t. STypeI t => Maybe ( Dict ( Interpolatable t ) )
interpolatableDict =
case sTypeI @t of
STyDouble -> Just Dict
sTyPoint@STyPoint
| ( _ :: SType ( Point2D c ) ) <- sTyPoint
, STyDouble <- sTypeI @c
-> Just Dict
_ -> Nothing
listToEndIndexMap :: ( Eq k, Hashable k ) => [ ( k, v ) ] -> HashMap k Int
listToEndIndexMap kvs =
HashMap.fromList
$ zipWith ( \ ( fieldName, _ ) index -> ( fieldName, lg - index - 1 ) )
kvs
[ 0 .. ]
where
lg :: Int
lg = length kvs
-- | Reflects a list of brush fields to the type level.
--
-- Assumes the input list has no duplicate field names,
-- but they don't have to be sorted.
reflectBrushFieldsNoDups :: [ ( Text, SomeFieldSType ) ] -> SomeBrushFields
reflectBrushFieldsNoDups = fromSomeBrushFieldsList . mkBrushFieldsList
where
mkBrushFieldsList :: [ ( Text, SomeFieldSType ) ] -> SomeBrushFieldsList
mkBrushFieldsList [] = SomeBrushFieldsList NilFields
mkBrushFieldsList ( ( k, SomeFieldSType ( _ :: Proxy# v ) ) : kvs )
| SomeBrushFieldsList ( kvs_list :: BrushFieldsList kvs ) <- mkBrushFieldsList kvs
, SomeSymbol ( _ :: Proxy k ) <- someSymbolVal ( Text.unpack k )
-- deduce RecSize ( MapDiff kvs ) ~ RecSize kvs
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize ( MapDiff kvs ) :~: SuperRecord.RecSize kvs )
-- compute indexing into record list (with SuperRecord, the index is the number of fields remaining)
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k ( ( k SuperRecord.:= v ) : kvs ) :~: SuperRecord.RecSize kvs )
= SomeBrushFieldsList ( ConsFields ( proxy# :: Proxy# k ) ( proxy# :: Proxy# v ) kvs_list )
fromSomeBrushFieldsList :: SomeBrushFieldsList -> SomeBrushFields
fromSomeBrushFieldsList ( SomeBrushFieldsList ( kvs :: BrushFieldsList kvs ) ) = case go @kvs kvs of
SomeClassyBrushFieldsList ( _ :: Proxy# kvs ) ( _ :: Proxy# kvs ) ->
SomeBrushFields ( proxy# :: Proxy# kvs )
where
go :: forall ( rts :: [ Type ] ) ( lts :: [ Type ] )
. ( STypesI rts, KnownNat ( SuperRecord.RecSize rts ), KnownNat ( SuperRecord.RecSize ( MapDiff rts ) ) )
=> BrushFieldsList lts -> SomeClassyBrushFieldsList rts lts
go NilFields =
SomeClassyBrushFieldsList ( proxy# :: Proxy# rts ) ( proxy# :: Proxy# '[] )
go ( ConsFields ( _ :: Proxy# k ) ( _ :: Proxy# a ) kvs' )
| ( SomeClassyBrushFieldsList _ ( _ :: Proxy# lts' ) ) <- go @rts kvs'
-- Assert some facts that result from the field names being distinct:
-- - current field name does not re-occur later on
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k lts' :~: lts' )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k ( MapDiff lts' ) :~: MapDiff lts' )
-- - looking up the type associated with the current field name returns the current type
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k rts :~: Just a )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k ( MapDiff rts ) :~: Just ( Diff a ) )
-- - MapDiff preserves length
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize ( MapDiff lts' ) :~: SuperRecord.RecSize lts' )
-- - compute the index (which is the number of fields remaining, i.e. the indexing starts counting from 0 from the right)
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k rts :~: SuperRecord.RecSize lts' )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k ( MapDiff rts ) :~: SuperRecord.RecSize lts' )
= SomeClassyBrushFieldsList ( proxy# :: Proxy# rts ) ( proxy# :: Proxy# ( ( k SuperRecord.:= a ) ': lts' ) )
-- | Existential type over an allowed record field type used in brushes, such as Double and Point2D Double.
data SomeFieldSType where
SomeFieldSType
:: ( STypeI a, Show a, NFData a, Serialisable a, Interpolatable a )
=> Proxy# a -> SomeFieldSType
-- | Existential type for allowed fields of a brush record.
data SomeBrushFields where
SomeBrushFields
:: forall kvs rec
. ( STypesI kvs
, rec ~ Super.Rec kvs
, Show rec, NFData rec
, Interpolatable rec
, Serialisable rec
)
=> Proxy# kvs -> SomeBrushFields
instance Show SomeBrushFields where
show ( SomeBrushFields ( _ :: Proxy# kvs ) ) = show ( sTypesI @kvs )
-- | Auxiliary datatype used to create a proof that record fields have the required instances.
data BrushFieldsList kvs where
NilFields :: BrushFieldsList '[]
ConsFields
::
( KnownSymbol k
, Show a, NFData a, Serialisable a
, Interpolatable a
, STypesI kvs
, KnownNat ( SuperRecord.RecSize kvs )
, SuperRecord.Has ( k SuperRecord.:= a ': kvs ) k a
)
=> Proxy# k -> Proxy# a -> BrushFieldsList kvs -> BrushFieldsList ( k SuperRecord.:= a ': kvs )
-- | Existential type used in the process of proving that record fields have the required instances.
data SomeBrushFieldsList where
SomeBrushFieldsList
:: ( STypesI kvs
, KnownNat ( SuperRecord.RecSize kvs )
, KnownNat ( SuperRecord.RecSize ( MapDiff kvs ) )
)
=> BrushFieldsList kvs -> SomeBrushFieldsList
-- | Type used to backtrack instance resolution in the SuperRecord library,
-- to witness the required typeclass instances by induction on the record fields.
data SomeClassyBrushFieldsList rts lts where
SomeClassyBrushFieldsList
:: forall rts lts drts dlts
. ( drts ~ MapDiff rts
, dlts ~ MapDiff lts
, KnownNat ( SuperRecord.RecSize rts )
, KnownNat ( SuperRecord.RecSize drts )
, SuperRecord.UnsafeRecBuild rts lts ( ConstC Serialisable )
, SuperRecord.UnsafeRecBuild drts dlts ( ConstC ( Module Double ) )
, SuperRecord.UnsafeRecBuild drts dlts ( ConstC Monoid )
, SuperRecord.RecApply rts lts ( ConstC Show )
, SuperRecord.RecApply rts lts ( ConstC NFData )
, SuperRecord.RecApply rts lts ( ConstC Serialisable )
, SuperRecord.RecApply drts dlts ( Tuple22C ( ConstC ( Module Double ) ) ( SuperRecord.Has drts ) )
, SuperRecord.RecApply drts dlts ( Tuple22C ( ConstC Semigroup ) ( SuperRecord.Has drts ) )
, SuperRecord.RecApply drts dlts ( Tuple22C ( ConstC Group ) ( SuperRecord.Has drts ) )
, SuperRecord.RecApply drts dlts ( HasDiff' rts )
, SuperRecord.TraversalCHelper dlts rts drts ( HasTorsor rts )
)
=> Proxy# rts -> Proxy# lts -> SomeClassyBrushFieldsList rts lts


@@ -28,6 +28,10 @@ import qualified GI.Gtk as GTK
 import qualified Control.Concurrent.STM.TVar as STM
   ( TVar )
+-- unordered-containers
+import Data.HashMap.Strict
+  ( HashMap )
 -- MetaBrush
 import Math.Bezier.Cubic.Fit
   ( FitParameters )
@@ -35,6 +39,8 @@ import Math.Vector2D
   ( Point2D )
 import MetaBrush.Asset.Colours
   ( Colours )
+import MetaBrush.Brush
+  ( Brush )
 import MetaBrush.Document.Draw
   ( DrawAnchor )
 import MetaBrush.Document.History
@@ -73,6 +79,7 @@ data Variables
   { uniqueSupply :: !UniqueSupply
   , activeDocumentTVar :: !( STM.TVar ( Maybe Unique ) )
   , openDocumentsTVar :: !( STM.TVar ( Map Unique DocumentHistory ) )
+  , brushesTVar :: !( STM.TVar ( HashMap Brush Unique ) )
   , mousePosTVar :: !( STM.TVar ( Maybe ( Point2D Double ) ) )
   , mouseHoldTVar :: !( STM.TVar ( Maybe HoldAction ) )
   , modifiersTVar :: !( STM.TVar ( Set Modifier ) )


@@ -8,6 +8,7 @@
 {-# LANGUAGE FlexibleInstances #-}
 {-# LANGUAGE GADTs #-}
 {-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MagicHash #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
 {-# LANGUAGE NamedFieldPuns #-}
 {-# LANGUAGE OverloadedStrings #-}
@@ -26,7 +27,6 @@ module MetaBrush.Document
   , emptyDocument
   , Stroke(..), StrokeSpline, _strokeSpline, overStrokeSpline
   , PointData(..), BrushPointData(..), DiffPointData(..)
-  , Brush(..), emptyBrush
   , FocusState(..), Hoverable(..), HoverContext(..)
   , Guide(..)
   , _selection, _coords, coords
@@ -39,8 +39,6 @@ import Data.Coerce
   ( coerce )
 import Data.Functor.Identity
   ( Identity(..) )
-import Data.Kind
-  ( Type )
 import Data.Semigroup
   ( Arg(..), Min(..), ArgMin )
 import GHC.Generics
@@ -81,14 +79,10 @@ import Control.Concurrent.STM
 -- superrecord
 import qualified SuperRecord as Super
   ( Rec )
-import qualified SuperRecord
-  ( Intersect, rnil )
 -- text
 import Data.Text
   ( Text )
-import qualified Data.Text as Text
-  ( unpack )
 -- transformers
 import Control.Monad.Trans.Reader
@@ -96,7 +90,7 @@ import Control.Monad.Trans.Reader
 -- MetaBrush
 import Math.Bezier.Spline
-  ( Spline(..), KnownSplineType, Curves(..) )
+  ( Spline(..), KnownSplineType )
 import Math.Bezier.Stroke
   ( CachedStroke )
 import Math.Module
@@ -107,10 +101,12 @@ import Math.Module
   )
 import Math.Vector2D
   ( Point2D(..), Vector2D(..) )
+import MetaBrush.Brush
+  ( Brush, BrushReference )
 import {-# SOURCE #-} MetaBrush.Document.Serialise
   ( Serialisable(..) )
 import MetaBrush.MetaParameter.AST
-  ( STypesI, Adapted, AdaptableFunction(..), BrushFunction )
+  ( STypesI(..) )
 import MetaBrush.MetaParameter.Interpolation
   ( Interpolatable(..) ) -- + orphan instances
 import MetaBrush.UI.Viewport
@@ -145,6 +141,7 @@ data Document
   , zoomFactor :: !Double
   , documentUnique :: Unique
   , documentContent :: !DocumentContent
+  , documentBrushes :: !( Map Unique Brush )
   }
   deriving stock ( Show, Generic )
   deriving anyclass NFData
@@ -166,28 +163,23 @@ data Stroke where
   Stroke
     :: ( KnownSplineType clo
        , pointParams ~ Super.Rec pointFields, STypesI pointFields
-       , brushParams ~ Super.Rec brushFields, STypesI brushFields
-       , usedParams ~ Super.Rec usedFields , STypesI usedFields
-       , usedFields ~ ( brushFields `SuperRecord.Intersect` pointFields )
-       , Show brushParams, NFData brushParams
       , Show pointParams, NFData pointParams
       , Interpolatable pointParams
-       , Interpolatable usedParams
       , Serialisable pointParams
-       , Adapted brushFields pointFields usedFields
       )
    =>
-    { strokeName :: Text
+    { strokeName :: !Text
     , strokeVisible :: !Bool
     , strokeUnique :: Unique
-    , strokeBrush :: Brush brushFields
+    , strokeBrushRef :: !( BrushReference pointFields )
     , strokeSpline :: !( StrokeSpline clo pointParams )
     }
   -> Stroke
 deriving stock instance Show Stroke
 instance NFData Stroke where
-  rnf ( Stroke { strokeName, strokeVisible, strokeUnique, strokeSpline } )
+  rnf ( Stroke { strokeName, strokeVisible, strokeUnique, strokeBrushRef, strokeSpline } )
     = deepseq strokeSpline
+    . deepseq strokeBrushRef
    . deepseq strokeUnique
    . deepseq strokeVisible
    $ rnf strokeName
@@ -224,21 +216,6 @@ overStrokeSpline
 overStrokeSpline f = coerce ( _strokeSpline @Identity ( coerce . f ) )
-data Brush ( brushFields :: [ Type ] )
-  = BrushData
-    { brushName :: !Text
-    , brushCode :: !Text
-    , brushFunction :: !( BrushFunction brushFields )
-    }
-instance Show ( Brush brushFields ) where
-  show ( BrushData { brushName } ) = Text.unpack brushName
--- Brush parameters using open records.
-emptyBrush :: Brush '[]
-emptyBrush = BrushData "Empty brush" ""
-  ( AdaptableFunction ( const SuperRecord.rnil, const $ Spline ( Point2D 0 0 ) NoCurves ) )
 data PointData params
   = PointData
     { pointCoords :: !( Point2D Double )
@@ -285,6 +262,7 @@ emptyDocument docName unique =
       , strokes = []
       , guides = Map.empty
       }
+  , documentBrushes = Map.empty
   }
 --------------------------------------------------------------------------------


@@ -65,10 +65,11 @@ import Math.Module
   ( squaredNorm )
 import Math.Vector2D
   ( Point2D(..), Vector2D(..) )
+import MetaBrush.Brush
+  ( BrushReference(NoBrush) )
 import MetaBrush.Document
   ( Document(..), Stroke(..), StrokeSpline
   , FocusState(..), PointData(..)
-  , emptyBrush
   , _selection, _strokeSpline
   , coords, overStrokeSpline
   )
@@ -122,7 +123,7 @@ getOrCreateDrawAnchor uniqueSupply c doc@( Document { zoomFactor } ) =
       , strokeVisible = True
       , strokeUnique = uniq
       , strokeSpline = newSpline
-      , strokeBrush = emptyBrush
+      , strokeBrushRef = NoBrush
       }
     newDoc' :: Document
     newDoc'


@@ -1,6 +1,4 @@
-{-# LANGUAGE AllowAmbiguousTypes #-}
 {-# LANGUAGE BlockArguments #-}
-{-# LANGUAGE ConstraintKinds #-}
 {-# LANGUAGE DataKinds #-}
 {-# LANGUAGE FlexibleContexts #-}
 {-# LANGUAGE GADTs #-}
@@ -12,12 +10,9 @@
 {-# LANGUAGE PolyKinds #-}
 {-# LANGUAGE ScopedTypeVariables #-}
 {-# LANGUAGE TypeApplications #-}
-{-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE TypeOperators #-}
 {-# LANGUAGE UndecidableInstances #-}
-{-# OPTIONS_GHC -fplugin GHC.TypeLits.KnownNat.Solver #-}
 module MetaBrush.Document.Serialise
   ( Serialisable(..)
   , documentToJSON, documentFromJSON
@@ -27,7 +22,7 @@ module MetaBrush.Document.Serialise
 -- base
 import Control.Arrow
-  ( (&&&) )
+  ( (&&&), first )
 import Control.Monad
   ( unless )
 import qualified Data.Bifunctor as Bifunctor
@@ -38,22 +33,12 @@ import Data.Functor.Contravariant
   ( contramap )
 import Data.Functor.Identity
   ( Identity(..) )
-import Data.Kind
-  ( Type )
-import Data.List
-  ( sortBy )
-import Data.Ord
-  ( comparing )
-import Data.Proxy
-  ( Proxy(Proxy) )
 import Data.Type.Equality
   ( (:~:)(Refl) )
-import Data.Typeable
-  ( Typeable, eqT )
 import GHC.Exts
   ( Proxy#, proxy# )
 import GHC.TypeLits
-  ( symbolVal', KnownSymbol, SomeSymbol(..), someSymbolVal, sameSymbol )
+  ( symbolVal', KnownSymbol )
 import GHC.TypeNats
   ( KnownNat )
 import Unsafe.Coerce
@@ -77,16 +62,12 @@ import qualified Data.ByteString.Lazy.Builder as Lazy.ByteString.Builder
 import Data.Map.Strict
   ( Map )
 import qualified Data.Map.Strict as Map
-  ( fromList, elems )
+  ( elems, empty, fromList, insert, lookup )
 import Data.Sequence
   ( Seq )
 import qualified Data.Sequence as Seq
   ( fromList )
--- deepseq
-import Control.DeepSeq
-  ( NFData(..) )
 -- directory
 import System.Directory
   ( canonicalizePath, createDirectoryIfMissing, doesFileExist )
@@ -99,10 +80,6 @@ import System.FilePath
 import Data.Generics.Product.Typed
   ( HasType(typed) )
--- groups
-import Data.Group
-  ( Group )
 -- lens
 import Control.Lens
   ( view )
@@ -123,13 +100,12 @@ import qualified Control.Concurrent.STM as STM
 import qualified SuperRecord as Super
   ( Rec )
 import qualified SuperRecord
-  ( Has, RecTy, (:=), FldProxy(..)
-  , RecSize, RecApply(..), RecVecIdxPos, UnsafeRecBuild(..)
-  , TraversalCHelper, RemoveAccessTo, Intersect
+  ( FldProxy(..)
+  , RecSize, RecApply(..), UnsafeRecBuild(..)
   , reflectRec
   )
 import SuperRecord
-  ( ConstC, Tuple22C )
+  ( ConstC )
 -- text
 import Data.Text
@@ -140,10 +116,18 @@ import qualified Data.Text as Text
 -- transformers
 import Control.Monad.IO.Class
   ( MonadIO(liftIO) )
-import Control.Monad.Trans.Reader
-  ( runReaderT )
 import Control.Monad.Trans.Class
   ( MonadTrans(lift) )
+import Control.Monad.Trans.Reader
+  ( runReaderT )
+import Control.Monad.Trans.State.Strict
+  ( StateT, runStateT, get, put )
+-- unordered-containers
+import Data.HashMap.Strict
+  ( HashMap )
+import qualified Data.HashMap.Strict as HashMap
+  ( foldrWithKey', insert, lookup )
 -- waargonaut
 import qualified Waargonaut.Attoparsec as JSON.Decoder
@@ -172,8 +156,12 @@ import qualified Waargonaut.Prettier as JSON
   ( prettyJson )
 import qualified Waargonaut.Prettier as TonyMorris
   ( Natural )
+import qualified Waargonaut.Types.JObject as JSON
+  ( MapLikeObj )
 import Waargonaut.Types.Json
   ( Json )
+import qualified Waargonaut.Types.Whitespace as JSON
+  ( WS )
 -- MetaBrush
 import qualified Math.Bezier.Cubic as Cubic
@@ -186,26 +174,25 @@ import Math.Bezier.Spline
   )
 import Math.Bezier.Stroke
   ( CachedStroke(..) )
-import Math.Module
-  ( Module )
 import Math.Vector2D
   ( Point2D(..), Vector2D(..), Segment )
+import MetaBrush.Brush
+  ( Brush(..), BrushReference(..), newBrushReference
+  , SomeBrushFields(..), SomeFieldSType(..), reflectBrushFieldsNoDups
+  )
 import MetaBrush.Document
   ( Document(..), DocumentContent(..), Guide(..)
   , Stroke(..), StrokeSpline
-  , PointData(..), Brush(..), FocusState(..)
+  , PointData(..), FocusState(..)
   )
 import MetaBrush.MetaParameter.AST
-  ( SType(..), STypeI(..), STypes(..), STypesI(..)
+  ( SType(..), STypeI(..), STypesI(..)
   , SomeSType(..), someSTypes
-  , Adapted, AdaptableFunction(..), BrushFunction
-  , MapFields, UniqueField, UseFieldsInBrush
+  , AdaptableFunction(..), BrushFunction
   , eqTy, eqTys
   )
 import MetaBrush.MetaParameter.Driver
   ( SomeBrushFunction(..), interpretBrush )
-import MetaBrush.MetaParameter.Interpolation
-  ( Interpolatable(..), MapDiff, HasDiff', HasTorsor )
 import MetaBrush.Unique
   ( Unique, UniqueSupply, freshUnique )
@@ -226,9 +213,16 @@ documentToJSON
     four = unsafeCoerce ( 4 :: Integer )
 -- | Parse a document from JSON (given by a strict bytestring).
-documentFromJSON :: UniqueSupply -> Maybe FilePath -> Strict.ByteString -> IO ( Either JSON.DecodeError Document )
-documentFromJSON uniqueSupply mfp
-  = fmap ( Bifunctor.first fst )
+--
+-- Updates the store of brushes by adding any new brushes contained in the document.
+documentFromJSON
+  :: UniqueSupply -> HashMap Brush Unique
+  -> Maybe FilePath
+  -> Strict.ByteString
+  -> IO ( Either JSON.DecodeError Document, HashMap Brush Unique )
+documentFromJSON uniqueSupply brushUniques mfp
+  = fmap ( first $ Bifunctor.first fst )
+  . ( `runStateT` brushUniques )
   . JSON.Decoder.decodeAttoparsecByteString ( decodeDocument uniqueSupply mfp )
 --------------------------------------------------------------------------------
@@ -247,12 +241,14 @@ saveDocument path doc = do
   atomicReplaceFile Nothing path' ( documentToJSON doc )
 -- | Load a MetaBrush document.
-loadDocument :: UniqueSupply -> FilePath -> IO ( Either String Document )
-loadDocument uniqueSupply fp = do
+--
+-- Updates the store of brushes by adding any new brushes contained in the document.
+loadDocument :: UniqueSupply -> HashMap Brush Unique -> FilePath -> IO ( Either String Document, HashMap Brush Unique )
+loadDocument uniqueSupply brushUniques fp = do
   exists <- doesFileExist fp
   if exists
-  then Bifunctor.first show <$> ( documentFromJSON uniqueSupply ( Just fp ) =<< Strict.ByteString.readFile fp )
-  else pure ( Left $ "No file at " <> fp )
+  then first ( Bifunctor.first show ) <$> ( documentFromJSON uniqueSupply brushUniques ( Just fp ) =<< Strict.ByteString.readFile fp )
+  else pure ( Left $ "No file at " <> fp, brushUniques )
 --------------------------------------------------------------------------------
@@ -551,6 +547,7 @@ encodeSomeSType = JSON.Encoder.mapLikeObj \ ( SomeSType ( _ :: Proxy# ty ) ) ->
      -> JSON.Encoder.atKey' "tag" JSON.Encoder.text "adaptableFun"
       . JSON.Encoder.atKey' "fields" encodeFieldTypes ( someSTypes @kvs )
       . JSON.Encoder.atKey' "res" encodeSomeSType ( SomeSType ( proxy# :: Proxy# res ) )
 {-
 decodeSomeSType :: Monad m => JSON.Decoder m SomeSType
 decodeSomeSType = do
@@ -583,7 +580,9 @@ decodeSomeSType = do
      ( SomeBrushFields ( _ :: Proxy# kvs ) ) <- JSON.Decoder.atKey "fields" decodeFieldTypes
      ( SomeSType ( _ :: Proxy# a ) ) <- JSON.Decoder.atKey "res" decodeSomeSType
      pure ( SomeSType ( proxy# :: Proxy# ( AdaptableFunction kvs a ) ) )
+    _ -> throwError ( JSON.ParseFailed $ "Unsupported record field type with tag " <> tag )
 -}
 decodeSomeFieldSType :: Monad m => JSON.Decoder m SomeFieldSType
 decodeSomeFieldSType = do
   tag <- JSON.Decoder.atKey "tag" JSON.Decoder.text
@@ -591,24 +590,23 @@ decodeSomeFieldSType = do
     "double" -> pure ( SomeFieldSType ( proxy# :: Proxy# Double ) )
     "point" -> do
       ( SomeFieldSType ( _ :: Proxy# a ) ) <- JSON.Decoder.atKey "coords" decodeSomeFieldSType
-      case eqT @a @Double of
+      case eqTy @a @Double of
        Just Refl -> pure ( SomeFieldSType ( proxy# :: Proxy# ( Point2D Double ) ) )
        Nothing -> throwError ( JSON.ParseFailed "Point2D: non-Double coordinate type" )
     _ -> throwError ( JSON.ParseFailed $ "Unsupported record field type with tag " <> tag )
-encodeFieldTypes :: Monad f => JSON.Encoder f ( [ ( Text, SomeSType ) ] )
+encodeFieldTypes :: Monad f => JSON.Encoder f [ ( Text, SomeSType ) ]
 encodeFieldTypes = JSON.Encoder.keyValueTupleFoldable encodeSomeSType
 decodeFieldTypes :: Monad m => JSON.Decoder m SomeBrushFields
 decodeFieldTypes = do
   fields <- JSON.Decoder.objectAsKeyValues JSON.Decoder.text decodeSomeFieldSType
   let
-    sortedFields :: [ ( Text, SomeFieldSType ) ]
-    sortedFields = sortBy ( comparing fst ) fields
     duplicates :: [ Text ]
-    duplicates = duplicatesAcc [] [] sortedFields
-    duplicatesAcc :: [ Text ] -> [ Text ] -> [ ( Text, SomeFieldSType ) ] -> [ Text ]
+    duplicates = duplicatesAcc [] [] fields
+    duplicatesAcc :: [ Text ] -> [ Text ] -> [ ( Text, whatever ) ] -> [ Text ]
     duplicatesAcc _ dups [] = dups
     duplicatesAcc seen dups ( ( k, _ ) : kvs )
       | k `elem` seen
@@ -616,89 +614,37 @@ decodeFieldTypes = do
      | otherwise
      = duplicatesAcc ( k : seen ) dups kvs
   case duplicates of
-    [] -> pure ( mkBrushFields sortedFields )
+    [] -> pure ( reflectBrushFieldsNoDups fields )
    [dup] -> throwError ( JSON.ParseFailed $ "Duplicate field name " <> dup <> " in brush record type" )
    dups -> throwError ( JSON.ParseFailed $ "Duplicate field names in brush record type:\n" <> Text.unwords dups )
-  where
-    mkBrushFields :: [ ( Text, SomeFieldSType ) ] -> SomeBrushFields
-    mkBrushFields = fromSomeBrushFieldsList . mkBrushFieldsList
-    mkBrushFieldsList :: [ ( Text, SomeFieldSType ) ] -> SomeBrushFieldsList
-    mkBrushFieldsList [] = SomeBrushFieldsList NilFields
-    mkBrushFieldsList ( ( k, SomeFieldSType ( _ :: Proxy# v ) ) : kvs )
-      | SomeBrushFieldsList ( kvs_list :: BrushFieldsList kvs ) <- mkBrushFieldsList kvs
-      , SomeSymbol ( _ :: Proxy k ) <- someSymbolVal ( Text.unpack k )
-      -- deduce RecSize ( MapDiff kvs ) ~ RecSize kvs
-      , Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize ( MapDiff kvs ) :~: SuperRecord.RecSize kvs )
-      -- compute indexing into record list (with SuperRecord, the index is the number of fields remaining)
-      , Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k ( ( k SuperRecord.:= v ) : kvs ) :~: SuperRecord.RecSize kvs )
-      = SomeBrushFieldsList ( ConsFields ( proxy# :: Proxy# k ) ( proxy# :: Proxy# v ) kvs_list )
-    fromSomeBrushFieldsList :: SomeBrushFieldsList -> SomeBrushFields
-    fromSomeBrushFieldsList ( SomeBrushFieldsList ( kvs :: BrushFieldsList kvs ) ) = case go @kvs kvs of
-      SomeClassyBrushFieldsList ( _ :: Proxy# kvs ) ( _ :: Proxy# kvs ) ->
-        SomeBrushFields ( proxy# :: Proxy# kvs )
-      where
-        go :: forall ( rts :: [ Type ] ) ( lts :: [ Type ] )
-           .  ( STypesI rts, KnownNat ( SuperRecord.RecSize rts ), KnownNat ( SuperRecord.RecSize ( MapDiff rts ) ) )
-           => BrushFieldsList lts -> SomeClassyBrushFieldsList rts lts
-        go NilFields =
-          SomeClassyBrushFieldsList ( proxy# :: Proxy# rts ) ( proxy# :: Proxy# '[] )
-        go ( ConsFields ( _ :: Proxy# k ) ( _ :: Proxy# a ) kvs' )
-          | ( SomeClassyBrushFieldsList _ ( _ :: Proxy# lts' ) ) <- go @rts kvs'
-          -- Assert some facts that result from the field names being distinct:
-          -- - current field name does not re-occur later on
-          , Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k lts' :~: lts' )
-          , Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k ( MapDiff lts' ) :~: MapDiff lts' )
-          -- - looking up the type associated with the current field name returns the current type
-          , Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k rts :~: Just a )
-          , Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k ( MapDiff rts ) :~: Just ( Diff a ) )
-          -- - MapDiff preserves length
-          , Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize ( MapDiff lts' ) :~: SuperRecord.RecSize lts' )
-          -- - compute the index (which is the number of fields remaining, i.e. the indexing starts counting from 0 from the right)
-          , Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k rts :~: SuperRecord.RecSize lts' )
-          , Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k ( MapDiff rts ) :~: SuperRecord.RecSize lts' )
-          = SomeClassyBrushFieldsList ( proxy# :: Proxy# rts ) ( proxy# :: Proxy# ( ( k SuperRecord.:= a ) ': lts' ) )
-encodeBrush :: Applicative f => JSON.Encoder f ( Brush brushParams )
-encodeBrush = JSON.Encoder.mapLikeObj \ ( BrushData { brushName, brushCode } ) ->
+encodeBrush :: Applicative f => JSON.Encoder f Brush
+encodeBrush = JSON.Encoder.mapLikeObj
+  \ ( BrushData { brushName, brushCode } ) ->
    JSON.Encoder.atKey' "name" JSON.Encoder.text brushName
  . JSON.Encoder.atKey' "code" JSON.Encoder.text brushCode
-decodeBrush
-  :: forall m flds. ( MonadIO m, STypesI flds )
-  => UniqueSupply
-  -> JSON.Decoder m ( Brush flds )
+decodeBrush :: MonadIO m => UniqueSupply -> JSON.Decoder m Brush
 decodeBrush uniqSupply = do
   brushName <- JSON.Decoder.atKey "name" JSON.Decoder.text
   brushCode <- JSON.Decoder.atKey "code" JSON.Decoder.text
   ( mbBrush, _ ) <- lift ( liftIO $ interpretBrush uniqSupply brushCode )
   case mbBrush of
    Left err -> throwError ( JSON.ParseFailed ( "Failed to interpret brush code:\n" <> ( Text.pack $ show err ) ) )
-    Right ( SomeBrushFunction ( brushFunction :: BrushFunction brushParams ) ) ->
-      case eqTys @flds @brushParams of
-        Just Refl -> pure ( BrushData { brushName, brushCode, brushFunction } )
-        Nothing ->
-          throwError
-            ( JSON.ParseFailed $
-              "Brush has unexpected input record type:\n\
-              \Expected: " <> Text.pack ( show ( sTypesI @flds ) ) <> "\n\
-              \ Actual: " <> Text.pack ( show ( sTypesI @brushParams ) )
-            )
+    Right ( SomeBrushFunction brushFunction ) ->
+      pure ( BrushData { brushName, brushCode, brushFunction } )
-encodeStroke :: Monad f => JSON.Encoder f Stroke
-encodeStroke = JSON.Encoder.mapLikeObj
+encodeStroke :: Monad f => Map Unique Brush -> JSON.Encoder f Stroke
+encodeStroke brushes = JSON.Encoder.mapLikeObj
  \ ( Stroke
      { strokeName
      , strokeVisible
      , strokeSpline = strokeSpline :: StrokeSpline clo ( Super.Rec pointFields )
-      , strokeBrush = strokeBrush :: Brush brushFields
+      , strokeBrushRef
      }
    ) ->
    let
@@ -706,38 +652,58 @@ encodeStroke = JSON.Encoder.mapLikeObj
      closed = case ssplineType @clo of
        SClosed -> True
        SOpen -> False
+      mbEncodeBrush :: JSON.MapLikeObj JSON.WS Json -> JSON.MapLikeObj JSON.WS Json
+      mbEncodeBrush = case strokeBrushRef of
+        BrushReference ( _ :: Proxy# brushFields1 ) unique ->
+          case Map.lookup unique brushes of
+            Nothing -> error ( "encodeStroke: no brush with unique " <> show unique <> "in environment" )
+            Just ( brush@BrushData { brushName, brushFunction = ( _ :: BrushFunction brushFields2 ) } ) ->
+              case eqTys @brushFields1 @brushFields2 of
+                Nothing -> error
+                  ( "encodeStroke: brush '" <> Text.unpack brushName <> "' has unexpected field types.\n\
+                    \Expected: " <> show ( sTypesI @brushFields1 ) <> "\n\
+                    \ Actual: " <> show ( sTypesI @brushFields2 )
+                  )
+                Just Refl -> JSON.Encoder.atKey' "brush" encodeBrush brush
+        NoBrush -> id
    in
      JSON.Encoder.atKey' "name" JSON.Encoder.text strokeName
    . JSON.Encoder.atKey' "visible" JSON.Encoder.bool strokeVisible
    . JSON.Encoder.atKey' "closed" JSON.Encoder.bool closed
-    . JSON.Encoder.atKey' "brushFields" encodeFieldTypes ( someSTypes @brushFields )
    . JSON.Encoder.atKey' "pointFields" encodeFieldTypes ( someSTypes @pointFields )
-    . JSON.Encoder.atKey' "usedFields" encodeFieldTypes ( someSTypes @( brushFields `SuperRecord.Intersect` pointFields ) )
-    . JSON.Encoder.atKey' "brush" encodeBrush strokeBrush
+    . mbEncodeBrush
    . JSON.Encoder.atKey' "spline" ( encodeSpline encodePointData ) strokeSpline
-decodeStroke :: forall m. MonadIO m => UniqueSupply -> JSON.Decoder m Stroke
+decodeStroke :: forall m. MonadIO m => UniqueSupply -> JSON.Decoder ( StateT ( HashMap Brush Unique ) m ) Stroke
 decodeStroke uniqueSupply = do
+  brushHashMap <- lift get
   strokeName <- JSON.Decoder.atKey "name" JSON.Decoder.text
   strokeVisible <- JSON.Decoder.atKey "visible" JSON.Decoder.bool
   strokeUnique <- lift ( liftIO . STM.atomically $ runReaderT freshUnique uniqueSupply )
   strokeClosed <- JSON.Decoder.atKey "closed" JSON.Decoder.bool
-  SomeBrushFields ( _ :: Proxy# brushFields ) <- JSON.Decoder.atKey "brushFields" decodeFieldTypes
   SomeBrushFields ( _ :: Proxy# pointFields ) <- JSON.Decoder.atKey "pointFields" decodeFieldTypes
-  SomeBrushFields ( _ :: Proxy# usedFields ) <- JSON.Decoder.atKey "usedFields" decodeFieldTypes
-  strokeBrush <- JSON.Decoder.atKey "brush" ( decodeBrush @m @brushFields uniqueSupply )
-  case proveAdapted @brushFields @pointFields @usedFields of
-    Nothing -> throwError ( JSON.ParseFailed "Stroke: 'usedFields' is not equal to 'brushFields `Intersect` pointFields'" )
-    Just Dict ->
-      case strokeClosed of
-        True -> do
+  mbBrush <- JSON.Decoder.atKeyOptional "brush" ( decodeBrush uniqueSupply )
+  strokeBrushRef <-
+    case mbBrush of
+      Nothing -> pure NoBrush
+      Just ( brush@BrushData { brushFunction = _ :: BrushFunction brushFields } ) -> do
+        brushUnique <-
+          case HashMap.lookup brush brushHashMap of
+            Nothing -> do
+              brushUnique <- lift ( liftIO . STM.atomically $ runReaderT freshUnique uniqueSupply )
+              lift $ put ( HashMap.insert brush brushUnique brushHashMap )
+              pure brushUnique
+            Just brushUnique -> pure brushUnique
+        pure ( newBrushReference @brushFields brushUnique )
+  if strokeClosed
+  then do
    strokeSpline <- JSON.Decoder.atKey "spline" ( decodeSpline @Closed @( PointData ( Super.Rec pointFields ) ) decodePointData )
-    pure ( Stroke { strokeName, strokeVisible, strokeUnique, strokeSpline, strokeBrush } )
-        False -> do
+    pure ( Stroke { strokeName, strokeVisible, strokeUnique, strokeSpline, strokeBrushRef } )
+  else do
    strokeSpline <- JSON.Decoder.atKey "spline" ( decodeSpline @Open @( PointData ( Super.Rec pointFields ) ) decodePointData )
-    pure ( Stroke { strokeName, strokeVisible, strokeUnique, strokeSpline, strokeBrush } )
+    pure ( Stroke { strokeName, strokeVisible, strokeUnique, strokeSpline, strokeBrushRef } )
 encodeGuide :: Applicative f => JSON.Encoder f Guide
 encodeGuide = JSON.Encoder.mapLikeObj \ ( Guide { guidePoint, guideNormal } ) ->
@ -756,12 +722,15 @@ decodeGuide uniqueSupply = do
encodeDocumentContent :: Applicative f => Map Unique Brush -> JSON.Encoder f DocumentContent
encodeDocumentContent brushes = JSON.Encoder.mapLikeObj \ ( Content { guides, strokes } ) ->
    JSON.Encoder.atKey' "guides"  ( encodeUniqueMap encodeGuide ) guides
  . JSON.Encoder.atKey' "strokes" ( JSON.Encoder.list $ encodeStroke brushes ) strokes
decodeDocumentContent
  :: MonadIO m
  => UniqueSupply
  -> JSON.Decoder ( StateT ( HashMap Brush Unique ) m ) DocumentContent
decodeDocumentContent uniqueSupply = do
  let
    unsavedChanges :: Bool
@ -775,189 +744,22 @@ decodeDocumentContent uniqueSupply = do
encodeDocument :: Applicative f => JSON.Encoder f Document
encodeDocument = JSON.Encoder.mapLikeObj
  \ ( Document { displayName, viewportCenter, zoomFactor, documentContent, documentBrushes } ) ->
      JSON.Encoder.atKey' "name"    JSON.Encoder.text displayName
    . JSON.Encoder.atKey' "center"  ( encoder @( Point2D Double ) ) viewportCenter
    . JSON.Encoder.atKey' "zoom"    ( encoder @Double ) zoomFactor
    . JSON.Encoder.atKey' "content" ( encodeDocumentContent documentBrushes ) documentContent
decodeDocument :: MonadIO m => UniqueSupply -> Maybe FilePath -> JSON.Decoder ( StateT ( HashMap Brush Unique ) m ) Document
decodeDocument uniqueSupply mbFilePath = do
  displayName     <- JSON.Decoder.atKey "name"   JSON.Decoder.text
  viewportCenter  <- JSON.Decoder.atKey "center" ( decoder @( Point2D Double ) )
  zoomFactor      <- JSON.Decoder.atKey "zoom"   ( decoder @Double )
  documentUnique  <- lift ( liftIO . STM.atomically $ runReaderT freshUnique uniqueSupply )
  documentContent <- JSON.Decoder.atKey "content" ( decodeDocumentContent uniqueSupply )
  brushUniques    <- lift get
  let
    documentBrushes :: Map Unique Brush
    documentBrushes = HashMap.foldrWithKey' ( flip Map.insert ) Map.empty brushUniques
  pure ( Document { displayName, mbFilePath, viewportCenter, zoomFactor, documentUnique, documentContent, documentBrushes } )
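After a document has been decoded, the accumulated store is keyed by brush, while the document wants it keyed by unique; the fold above performs that re-indexing. A stand-alone sketch of the same fold, again with simplified stand-ins for Brush and Unique:

module FlipBrushStore where

import Data.HashMap.Strict ( HashMap )
import qualified Data.HashMap.Strict as HashMap
import Data.Map.Strict ( Map )
import qualified Data.Map.Strict as Map

type Brush  = String  -- stand-in
type Unique = Int     -- stand-in

-- Turn the decoder's ( brush -> unique ) store into the document's ( unique -> brush ) map.
flipStore :: HashMap Brush Unique -> Map Unique Brush
flipStore = HashMap.foldrWithKey' ( flip Map.insert ) Map.empty

-- flipStore ( HashMap.fromList [ ( "circle", 0 ), ( "square", 1 ) ] )
--   == Map.fromList [ ( 0, "circle" ), ( 1, "square" ) ]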
-- | Existential type over an allowed record field type used in brushes, such as Double and Point2D Double.
data SomeFieldSType where
SomeFieldSType
:: ( STypeI a, Show a, NFData a, Serialisable a, Interpolatable a, Typeable a )
=> Proxy# a -> SomeFieldSType
-- | Existential type for allowed fields of a brush record.
data SomeBrushFields where
SomeBrushFields
:: forall kvs rec
. ( STypesI kvs
, rec ~ Super.Rec kvs
, Show rec, NFData rec
, Interpolatable rec
, Serialisable rec
)
=> Proxy# kvs -> SomeBrushFields
instance Show SomeBrushFields where
show ( SomeBrushFields ( _ :: Proxy# kvs ) ) = show ( sTypesI @kvs )
-- | Auxiliary datatype used to create a proof that record fields have the required instances.
data BrushFieldsList kvs where
NilFields :: BrushFieldsList '[]
ConsFields
::
( KnownSymbol k
, Show a, NFData a, Serialisable a
, Interpolatable a
, STypesI kvs
, KnownNat ( SuperRecord.RecSize kvs )
, SuperRecord.Has ( k SuperRecord.:= a ': kvs ) k a
)
=> Proxy# k -> Proxy# a -> BrushFieldsList kvs -> BrushFieldsList ( k SuperRecord.:= a ': kvs )
-- | Existential type used in the process of proving that record fields have the required instances.
data SomeBrushFieldsList where
SomeBrushFieldsList
:: ( STypesI kvs
, KnownNat ( SuperRecord.RecSize kvs )
, KnownNat ( SuperRecord.RecSize ( MapDiff kvs ) )
)
=> BrushFieldsList kvs -> SomeBrushFieldsList
-- | Type used to backtrack instance resolution in the SuperRecord library,
-- to witness the required typeclass instances by induction on the record fields.
data SomeClassyBrushFieldsList rts lts where
SomeClassyBrushFieldsList
:: forall rts lts drts dlts
. ( drts ~ MapDiff rts
, dlts ~ MapDiff lts
, KnownNat ( SuperRecord.RecSize rts )
, KnownNat ( SuperRecord.RecSize drts )
, SuperRecord.UnsafeRecBuild rts lts ( ConstC Serialisable )
, SuperRecord.UnsafeRecBuild drts dlts ( ConstC ( Module Double ) )
, SuperRecord.UnsafeRecBuild drts dlts ( ConstC Monoid )
, SuperRecord.RecApply rts lts ( ConstC Show )
, SuperRecord.RecApply rts lts ( ConstC NFData )
, SuperRecord.RecApply rts lts ( ConstC Serialisable )
, SuperRecord.RecApply drts dlts ( Tuple22C ( ConstC ( Module Double ) ) ( SuperRecord.Has drts ) )
, SuperRecord.RecApply drts dlts ( Tuple22C ( ConstC Semigroup ) ( SuperRecord.Has drts ) )
, SuperRecord.RecApply drts dlts ( Tuple22C ( ConstC Group ) ( SuperRecord.Has drts ) )
, SuperRecord.RecApply drts dlts ( HasDiff' rts )
, SuperRecord.TraversalCHelper dlts rts drts ( HasTorsor rts )
)
=> Proxy# rts -> Proxy# lts -> SomeClassyBrushFieldsList rts lts
proveAdapted
:: forall brushFields givenFields usedFields
. ( STypesI brushFields, STypesI givenFields, STypesI usedFields )
=> Maybe ( Dict ( Adapted brushFields givenFields usedFields ) )
proveAdapted
| Just Dict <- proveUnsafeRecBuild @usedFields
, Just Dict <- proveRecApply @brushFields ( size @usedFields )
, Refl <- ( unsafeCoerce Refl :: usedFields :~: ( brushFields `SuperRecord.Intersect` givenFields ) )
= Just Dict
| otherwise
= Nothing
where
-- Provide evidence that each field of "used" appears in "given".
proveUnsafeRecBuild
:: forall lts_used
. ( STypesI lts_used )
=> Maybe ( Dict ( SuperRecord.UnsafeRecBuild usedFields lts_used ( SuperRecord.Has givenFields ) ) )
proveUnsafeRecBuild = case sTypesI @lts_used of
STyNil -> Just Dict
sTyCons@STyCons
| ( _ :: STypes ( k SuperRecord.:= v ': tail_lts_used ) ) <- sTyCons
, SomeIndex ( _ :: Proxy# i ) <- lookupIndex @k @v @givenFields
, Just Dict <- proveUnsafeRecBuild @tail_lts_used
-> Just Dict
| otherwise
-> Nothing
-- Provide evidence whether each field of "brush" appears in "used" or not.
-- Additionally checks that "used" is a subset of "brush".
proveRecApply
:: forall lts_brush
. ( STypesI lts_brush )
=> Int
-> Maybe ( Dict ( SuperRecord.RecApply ( MapFields UniqueField brushFields ) ( MapFields UniqueField lts_brush ) ( UseFieldsInBrush usedFields ) ) )
proveRecApply nbUnseen = case sTypesI @lts_brush of
STyNil -> if nbUnseen < 1 then Just Dict else Nothing
sTyCons@STyCons
| ( _ :: STypes ( k SuperRecord.:= v ': tail_lts_brush ) ) <- sTyCons
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecSize ( MapFields UniqueField brushFields ) :~: SuperRecord.RecSize brushFields )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k ( MapFields UniqueField brushFields ) :~: SuperRecord.RecSize tail_lts_brush )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k ( MapFields UniqueField brushFields ) :~: Just ( UniqueField v ) )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RemoveAccessTo k ( MapFields UniqueField tail_lts_brush ) :~: MapFields UniqueField tail_lts_brush )
-> case lookupIndex @k @v @usedFields of
SomeIndex ( _ :: Proxy# i ) ->
case proveRecApply @tail_lts_brush ( nbUnseen - 1 ) of
Just Dict -> Just Dict
Nothing -> Nothing
NoIndex ->
case proveRecApply @tail_lts_brush nbUnseen of
Just Dict -> Just Dict
Nothing -> Nothing
data LookupResult k v kvs where
NoIndex
:: forall k v kvs
. ( SuperRecord.RecTy k kvs ~ Nothing )
=> LookupResult k v kvs
SomeIndex
:: forall k v kvs i
. ( SuperRecord.RecTy k kvs ~ Just v
, SuperRecord.RecVecIdxPos k kvs ~ i
, KnownNat i
)
=> Proxy# i -> LookupResult k v kvs
lookupIndex
:: forall k v kvs
. ( STypesI kvs, KnownSymbol k, STypeI v )
=> LookupResult k v kvs
lookupIndex = case sTypesI @kvs of
STyNil -> NoIndex
sTyCons@STyCons
| ( _ :: STypes ( ( l SuperRecord.:= w ) ': tail_kvs ) ) <- sTyCons
-> case sameSymbol ( Proxy :: Proxy k ) ( Proxy :: Proxy l ) of
Just Refl
| Just Refl <- eqTy @v @w
, ( index_proxy :: Proxy# i ) <- ( proxy# :: Proxy# ( SuperRecord.RecSize tail_kvs ) )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k kvs :~: i )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k kvs :~: Just v )
-> SomeIndex index_proxy
_ -> case lookupIndex @k @v @tail_kvs of
NoIndex
| Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k kvs :~: Nothing )
-> NoIndex
SomeIndex ( px_j :: Proxy# j )
| Refl <- ( unsafeCoerce Refl :: SuperRecord.RecTy k kvs :~: Just v )
, Refl <- ( unsafeCoerce Refl :: SuperRecord.RecVecIdxPos k kvs :~: j )
-> SomeIndex px_j
size :: forall kvs. STypesI kvs => Int
size = case sTypesI @kvs of
STyNil -> 0
sTyCons@STyCons
| ( _ :: STypes ( head_kvs ': tail_kvs ) ) <- sTyCons
-> 1 + size @tail_kvs
data Dict c where
Dict :: c => Dict c
type family FromJust ( x :: Maybe a ) where
FromJust ( Just a ) = a
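The Dict wrapper above reifies a constraint as an ordinary value, which is how proveAdapted can either return evidence or fail with Nothing. A minimal, self-contained illustration of the pattern; the names here are purely illustrative and not part of the codebase:

{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE GADTs #-}
module DictSketch where

data Dict c where
  Dict :: c => Dict c

-- Pattern-matching on Dict brings the captured constraint back into scope.
withEvidence :: Dict ( Show a ) -> a -> String
withEvidence Dict x = show x

-- A check that may or may not produce evidence, mirroring the
-- Maybe ( Dict ... ) results returned by proveAdapted.
maybeEvidence :: Bool -> Maybe ( Dict ( Show Int ) )
maybeEvidence ok = if ok then Just Dict else Nothing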

View file

@ -1,15 +1,46 @@
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}

module MetaBrush.Document.Serialise
  ( Serialisable(..) )
  where

-- base
import GHC.TypeNats
  ( KnownNat )

-- superrecord
import qualified SuperRecord as Super
  ( Rec )
import qualified SuperRecord
  ( RecApply, RecSize, UnsafeRecBuild )
import SuperRecord
  ( ConstC )

-- waargonaut
import qualified Waargonaut.Decode as JSON
  ( Decoder )
import qualified Waargonaut.Encode as JSON
  ( Encoder )

-- MetaBrush
import Math.Vector2D
  ( Point2D, Vector2D )

--------------------------------------------------------------------------------

class Serialisable a where
  encoder :: Monad f => JSON.Encoder f a
  decoder :: Monad m => JSON.Decoder m a

instance Serialisable Double
instance Serialisable a => Serialisable ( Point2D a )
instance Serialisable a => Serialisable ( Vector2D a )
instance ( SuperRecord.RecApply flds flds ( ConstC Serialisable )
         , SuperRecord.UnsafeRecBuild flds flds ( ConstC Serialisable )
         , KnownNat ( SuperRecord.RecSize flds )
         )
      => Serialisable ( Super.Rec flds )
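For illustration only, a hand-written instance of the class for a hypothetical record type; Offset and the E/D import aliases below are inventions of this sketch, while the waargonaut combinators are the same ones used elsewhere in this commit.

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}
module SerialiseOffset where

import qualified Waargonaut.Decode as D
import qualified Waargonaut.Encode as E

import MetaBrush.Document.Serialise ( Serialisable(..) )

data Offset = Offset { dx :: Double, dy :: Double }

instance Serialisable Offset where
  encoder = E.mapLikeObj $ \ ( Offset x y ) ->
      E.atKey' "dx" ( encoder @Double ) x
    . E.atKey' "dy" ( encoder @Double ) y
  decoder = Offset <$> D.atKey "dx" ( decoder @Double )
                   <*> D.atKey "dy" ( decoder @Double )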

View file

@ -50,6 +50,8 @@ import Data.Functor.Identity
  ( Identity(..) )
import Data.Kind
  ( Type, Constraint )
import Data.List
  ( intercalate )
import Data.Proxy
  ( Proxy(..) )
import Data.Type.Equality
@ -178,22 +180,32 @@ instance ( STypesI kvs, STypeI a ) => STypeI ( AdaptableFunction kvs a ) where
data STypes ( kvs :: [ Type ] ) where
  STyNil  :: STypes '[]
  STyCons :: ( kv ~ ( k SuperRecord.:= v ), KnownSymbol k, STypeI v, STypesI kvs ) => STypes ( kv ': kvs )

instance Show ( STypes kvs ) where
  show sTypes = "'[" <> intercalate "," ( showSTypes sTypes ) <> "]"

showSTypes :: STypes kvs -> [ String ]
showSTypes STyNil = []
showSTypes sTyCons@STyCons
  | ( _ :: STypes ( ( k SuperRecord.:= v ) ': tail_kvs ) ) <- sTyCons
  = ( symbolVal' ( proxy# :: Proxy# k ) <> " := " <> show ( sTypeI @v ) ) : showSTypes ( sTypesI @tail_kvs )

class KnownNat ( SuperRecord.RecSize kvs ) => STypesI kvs where
  sTypesI :: STypes kvs

instance STypesI '[] where
  sTypesI = STyNil
-- Warning: this instance is somewhat overly general as it doesn't check for lack of duplicates
instance ( kv ~ ( k SuperRecord.:= v ), KnownSymbol k, STypeI v, STypesI kvs ) => STypesI ( kv ': kvs ) where
  sTypesI = STyCons

data SomeSType where
  SomeSType :: STypeI a => Proxy# a -> SomeSType

instance Show SomeSType where
  show ( SomeSType ( _ :: Proxy# a ) ) = show ( sTypeI @a )
instance Eq SomeSType where
  ( SomeSType ( _ :: Proxy# a ) ) == ( SomeSType ( _ :: Proxy# b ) ) =
    case eqTy @a @b of
      Just _ -> True
      _      -> False

eqSTy :: SType a -> SType b -> Maybe ( a :~: b )
eqSTy sTy_a@SFunTy sTy_b@SFunTy
@ -263,7 +275,10 @@ someSTypes = go ( sTypesI @kvs )
  go STyNil = []
  go sTyCons@STyCons
    | ( _ :: STypes ( ( l SuperRecord.:= v ) ': lvs' ) ) <- sTyCons
    , let
        l :: Text
        l = Text.pack $ symbolVal' ( proxy# :: Proxy# l )
    = ( l, SomeSType ( proxy# :: Proxy# v ) )
    : go ( sTypesI @lvs' )

------------------------------------------------
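The Show instances above walk the STypes singleton by induction, emitting one entry per field. A stripped-down, self-contained version of the same technique, tracking field names only:

{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeOperators #-}
module ShowFieldsSketch where

import Data.List ( intercalate )
import Data.Proxy ( Proxy(..) )
import GHC.TypeLits ( Symbol, KnownSymbol, symbolVal )

-- A singleton for a type-level list of field names.
data Fields ( ks :: [ Symbol ] ) where
  FNil  :: Fields '[]
  FCons :: KnownSymbol k => Proxy k -> Fields ks -> Fields ( k ': ks )

showFields :: Fields ks -> String
showFields fs = "'[" <> intercalate "," ( go fs ) <> "]"
  where
    go :: Fields ks' -> [ String ]
    go FNil           = []
    go ( FCons p ks ) = symbolVal p : go ks

-- showFields ( FCons ( Proxy @"width" ) ( FCons ( Proxy @"opacity" ) FNil ) )
--   == "'[width,opacity]"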

View file

@ -1,6 +1,8 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
@ -12,7 +14,9 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}

module MetaBrush.MetaParameter.Interpolation
  ( Interpolatable(..)
  , MapDiff, HasDiff', HasTorsor
  )
  where

-- base
@ -20,15 +24,23 @@ import Data.Functor.Identity
  ( Identity(..) )
import Data.Kind
  ( Type )
import Data.List
  ( intercalate )
import Data.Monoid
  ( Sum )
import GHC.Exts
  ( Proxy#, proxy# )
import GHC.TypeLits
  ( Symbol, KnownSymbol, symbolVal' )

-- acts
import Data.Act
  ( Act(..), Torsor(..) )

-- deepseq
import Control.DeepSeq
  ( NFData )

-- groups
import Data.Group
  ( Group(..) )
@ -48,6 +60,8 @@ import Math.Module
  ( Module(..) )
import Math.Vector2D
  ( Point2D, Vector2D )
import MetaBrush.MetaParameter.AST
  ( STypeI(..), STypesI )

--------------------------------------------------------------------------------
@ -78,8 +92,6 @@ type family MapDiff ( kvs :: [ Type ] ) = ( lvs :: [ Type ] ) | lvs -> kvs where
  MapDiff ( k SuperRecord.:= v ': kvs ) = ( k SuperRecord.:= Diff v ': MapDiff kvs )

instance ( Monoid ( Super.Rec kvs )
         , SuperRecord.RecApply kvs kvs
             ( Tuple22C ( ConstC Group ) ( SuperRecord.Has kvs ) )

View file

@ -1,3 +1,4 @@
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE BlockArguments #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveTraversable #-}
@ -31,6 +32,10 @@ import Data.Functor.Compose
  ( Compose(..) )
import Data.Int
  ( Int32 )
import Data.Maybe
  ( mapMaybe )
import Data.Type.Equality
  ( (:~:)(Refl) )
import GHC.Exts
  ( Proxy#, proxy# )
import GHC.Generics
@ -45,6 +50,10 @@ import Data.Act
  )

-- containers
import Data.Map.Strict
  ( Map )
import qualified Data.Map.Strict as Map
  ( lookup )
import Data.Sequence
  ( Seq(..) )
import Data.Set
@ -97,6 +106,8 @@ import Math.Vector2D
  ( Point2D(..), Vector2D(..) )
import MetaBrush.Asset.Colours
  ( Colours, ColourRecord(..) )
import MetaBrush.Brush
  ( Brush(..), BrushReference(..) )
import MetaBrush.Context
  ( Modifier(..)
  , HoldAction(..), PartialPath(..)
@ -107,7 +118,7 @@ import MetaBrush.Document
  , Stroke(..), StrokeSpline
  , FocusState(..)
  , HoverContext(..), Hoverable(..)
  , PointData(..)
  , _selection
  , coords
  )
@ -118,11 +129,13 @@ import MetaBrush.Document.Serialise
import MetaBrush.Document.Update
  ( DocChange(..) )
import MetaBrush.MetaParameter.AST
  ( AdaptableFunction(..), BrushFunction, eqTys )
import MetaBrush.MetaParameter.Interpolation
  ( MapDiff )
import MetaBrush.UI.ToolBar
  ( Mode(..) )
import MetaBrush.Unique
  ( Unique )
import MetaBrush.Util
  ( withRGBA )
@ -159,7 +172,7 @@ renderDocument
renderDocument
  cols fitParams mode debug ( viewportWidth, viewportHeight )
  modifiers mbMousePos mbHoldEvent mbPartialPath
  doc@( Document { viewportCenter = Point2D cx cy, zoomFactor, documentContent = content, documentBrushes } )
  = ( mbUpdatedDoc, drawingInstructions )
  where
@ -170,7 +183,7 @@ renderDocument
      Cairo.translate ( 0.5 + 0.5 * fromIntegral viewportWidth ) ( 0.5 + 0.5 * fromIntegral viewportHeight )
      Cairo.scale zoomFactor zoomFactor
      Cairo.translate ( -cx ) ( -cy )
      for_ strokesRenderData
        ( compositeRenders . getCompose . renderStroke cols mbHoverContext mode debug zoomFactor )
      renderSelectionRect
      Cairo.restore
@ -227,7 +240,7 @@ renderDocument
            , strokeVisible  = True
            , strokeUnique   = undefined
            , strokeName     = undefined
            , strokeBrushRef = NoBrush
            }
          )
        : strokes content
@ -236,40 +249,8 @@ renderDocument
| otherwise
= ( strokes content, True )

strokesRenderData :: [ StrokeRenderData ]
strokesRenderData = mapMaybe ( strokeRenderData fitParams documentBrushes ) modifiedStrokes

strokesWithOutlineInfo :: [ ( Stroke, Maybe ( Either ( SplinePts Closed ) ( SplinePts Closed, SplinePts Closed ), Seq FitPoint ) ) ]
strokesWithOutlineInfo =
fmap
( \ stroke@(
Stroke
{ strokeSpline = spline :: StrokeSpline clo ( Super.Rec pointFields )
, strokeBrush =
strokeBrush@(
BrushData { brushFunction = ( AdaptableFunction brushFn ) :: BrushFunction brushFields }
)
, ..
} ) ->
if strokeVisible
then
case ( proxy# :: Proxy# ( brushFields `SuperRecord.Intersect` pointFields ) ) of
( _ :: Proxy# usedFields ) ->
let
-- Get the adaptable brush shape (function),
-- specialising it to the type we are using.
toUsedParams :: Super.Rec pointFields -> Super.Rec usedFields
brushShapeFn :: Super.Rec usedFields -> SplinePts Closed
( toUsedParams, brushShapeFn ) = brushFn @pointFields @usedFields
-- Compute the outline using the brush function.
newSpline :: Spline clo CachedStroke ( PointData ( Super.Rec pointFields ) )
outline :: Either ( SplinePts Closed ) ( SplinePts Closed, SplinePts Closed )
fitPts :: Seq FitPoint
( newSpline, outline, fitPts ) =
computeStrokeOutline @( Super.Rec ( MapDiff usedFields ) ) @clo @( Super.Rec usedFields )
fitParams ( toUsedParams . brushParams ) brushShapeFn spline
in ( Stroke { strokeSpline = newSpline, .. } , Just ( outline, fitPts ) )
else ( stroke , Nothing )
)
modifiedStrokes
mbUpdatedDoc :: Maybe Document
mbUpdatedDoc
@ -284,33 +265,83 @@ renderDocument
  | otherwise
  = Nothing -- TODO: update the original document in this case too (by undoing the modifications)

-- | Utility type to gather information needed to render a stroke.
-- - No outline: just the underlying spline.
-- - Outline: keep track of the function which returns brush shape.
data StrokeRenderData where
  StrokeRenderData
    :: forall pointParams clo
    .  ( KnownSplineType clo, Show pointParams )
    => { strokeDataSpline :: !( StrokeSpline clo pointParams ) }
    -> StrokeRenderData
  StrokeWithOutlineRenderData
    :: forall pointParams clo
    .  ( KnownSplineType clo, Show pointParams )
    => { strokeDataSpline    :: !( StrokeSpline clo pointParams )
       , strokeOutlineData   :: !( Either
                                     ( SplinePts Closed )
                                     ( SplinePts Closed, SplinePts Closed )
                                 , Seq FitPoint
                                 )
       , strokeBrushFunction :: pointParams -> SplinePts Closed
       }
    -> StrokeRenderData

-- | Compute the data necessary to render a stroke.
--
-- - If the stroke has an associated brush, this consists of:
--   - the path that the brush follows,
--   - the computed outline (using fitting algorithm),
--   - the brush shape function.
-- - Otherwise, this consists of the underlying spline path only.
strokeRenderData :: FitParameters -> Map Unique Brush -> Stroke -> Maybe StrokeRenderData
strokeRenderData fitParams brushes ( Stroke { strokeSpline = spline :: StrokeSpline clo ( Super.Rec pointFields ), .. } ) =
  if strokeVisible
  then case strokeBrushRef of
    BrushReference ( _ :: Proxy# brushFields ) brushUnique
      -- TODO: could emit a warning if the following lookup fails.
      | Just ( BrushData { brushFunction = AdaptableFunction brushFn :: BrushFunction brushFields' } ) <- Map.lookup brushUnique brushes
      -- TODO: the following check could be skipped if we are feeling confident.
      , Just Refl <- eqTys @brushFields @brushFields' -- Refl <- ( unsafeCoerce Refl :: brushFields :~: brushFields' )
      , ( _ :: Proxy# usedFields ) <- ( proxy# :: Proxy# ( brushFields `SuperRecord.Intersect` pointFields ) )
      , let
          -- Get the adaptable brush shape (function),
          -- specialising it to the type we are using.
          toUsedParams :: Super.Rec pointFields -> Super.Rec usedFields
          brushShapeFn :: Super.Rec usedFields -> SplinePts Closed
          ( toUsedParams, brushShapeFn ) = brushFn @pointFields @usedFields
          -- Compute the outline using the brush function.
          newSpline :: Spline clo CachedStroke ( PointData ( Super.Rec pointFields ) )
          outline :: Either ( SplinePts Closed ) ( SplinePts Closed, SplinePts Closed )
          fitPts :: Seq FitPoint
          ( newSpline, outline, fitPts ) =
            computeStrokeOutline @( Super.Rec ( MapDiff usedFields ) ) @clo @( Super.Rec usedFields )
              fitParams ( toUsedParams . brushParams ) brushShapeFn spline
      -> Just $
           StrokeWithOutlineRenderData
             { strokeDataSpline    = newSpline
             , strokeOutlineData   = ( outline, fitPts )
             , strokeBrushFunction = brushShapeFn . toUsedParams
             }
    _ -> Just $
           StrokeRenderData
             { strokeDataSpline = spline }
  else Nothing

renderStroke
  :: Colours -> Maybe HoverContext -> Mode -> Bool -> Double
  -> StrokeRenderData
  -> Compose Renders Cairo.Render ()
renderStroke cols@( Colours { brush } ) mbHoverContext mode debug zoom = \case
  StrokeRenderData { strokeDataSpline } ->
    renderStrokeSpline cols mode mbHoverContext zoom ( const ( pure () ) ) strokeDataSpline
  StrokeWithOutlineRenderData strokeDataSpline strokeOutlineData strokeBrushFunction ->
    renderStrokeSpline cols mode mbHoverContext zoom
      ( when ( mode == BrushMode )
      . renderBrushShape ( cols { path = brush } ) mbHoverContext ( 1.5 * zoom ) strokeBrushFunction
      )
      strokeDataSpline
    *> Compose blank { renderStrokes = drawOutline cols debug zoom strokeOutlineData }
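strokeRenderData stores an existentially-tagged brush reference and re-checks the tag at lookup time before specialising the brush function. As an analogy only (MetaBrush uses its own eqTys check over SuperRecord field lists rather than Typeable), the same lookup-then-check shape can be sketched with Data.Typeable:

{-# LANGUAGE ExistentialQuantification #-}
module BrushLookupSketch where

import Data.Typeable ( Typeable, cast )
import Data.Map.Strict ( Map )
import qualified Data.Map.Strict as Map

-- A stored brush whose parameter type is hidden behind an existential.
data SomeBrush = forall params. Typeable params => SomeBrush ( params -> String )

-- Look the brush up by key, then check at runtime that its hidden parameter
-- type matches the one the caller expects; fail with Nothing otherwise.
lookupBrush :: Typeable params => Int -> Map Int SomeBrush -> Maybe ( params -> String )
lookupBrush key brushes =
  case Map.lookup key brushes of
    Nothing              -> Nothing
    Just ( SomeBrush f ) -> cast f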
-- | Render a sequence of stroke points.
--