From 437d98c1ba1c0a184da28f1ea2a87fff2cfb1d7e Mon Sep 17 00:00:00 2001 From: Mart Lubbers Date: Tue, 6 Sep 2022 14:14:44 +0200 Subject: [PATCH] many updates --- .chktexrc | 3 +- Exported Items.bib | 19 + appendix/clean_for_haskell_programmers.tex | 4 +- .../conclusion.tex | 5 +- .../class_deep_embedding.tex | 809 +++++++++++++++++ .../dsl_techniques.tex | 5 +- .../first-class_datatypes.tex | 0 ...ed_multi-view_stack-based_computations.tex | 13 + frontmatter/dedication.tex | 2 +- frontmatter/motto.tex | 7 +- frontmatter/titlepage.tex | 3 +- glossaries.tex | 8 +- img/orcid.png | Bin 0 -> 3410 bytes img/orcid_16x16.png | Bin 0 -> 421 bytes introduction/introduction.tex | 150 +++- latexmkrc | 2 + lstlanghaskelllhstex.sty | 155 ++++ other.bib | 840 ++++++++++++++++++ preamble.tex | 66 +- self.bib | 88 ++ .../beyond_microprocessors.tex | 12 + thesis.bib | 456 ---------- thesis.tex | 30 +- 23 files changed, 2149 insertions(+), 528 deletions(-) create mode 100644 Exported Items.bib rename domain_specific_languages/class_deep_embedding.tex => conclusion/conclusion.tex (64%) create mode 100644 domain-specific_languages/class_deep_embedding.tex rename {domain_specific_languages => domain-specific_languages}/dsl_techniques.tex (98%) rename {domain_specific_languages => domain-specific_languages}/first-class_datatypes.tex (100%) create mode 100644 domain-specific_languages/strongly-typed_multi-view_stack-based_computations.tex create mode 100644 img/orcid.png create mode 100644 img/orcid_16x16.png create mode 100644 lstlanghaskelllhstex.sty create mode 100644 other.bib create mode 100644 self.bib create mode 100644 task_oriented_programming/beyond_microprocessors.tex delete mode 100644 thesis.bib diff --git a/.chktexrc b/.chktexrc index a7c8d73..44ec2fd 100644 --- a/.chktexrc +++ b/.chktexrc @@ -2,7 +2,7 @@ CmdLine { -v } VerbEnvir { - lstinline lstlisting algorithm code spec lstClean lstHaskell + lstinline lstlisting algorithm code spec lstClean lstHaskell lstHaskellLhstex } 
WipeArg { \cleaninline:{} @@ -10,6 +10,7 @@ WipeArg { \texttt:{} \url:{} \only:{} + \orcid:{} } Silent { \pause diff --git a/Exported Items.bib b/Exported Items.bib new file mode 100644 index 0000000..1212c7d --- /dev/null +++ b/Exported Items.bib @@ -0,0 +1,19 @@ + +@inproceedings{hentschel_supersensors:_2016, + address = {Vienna, Austria}, + title = {Supersensors: {Raspberry} {Pi} {Devices} for {Smart} {Campus} {Infrastructure}}, + isbn = {978-1-5090-4052-0}, + shorttitle = {Supersensors}, + url = {http://ieeexplore.ieee.org/document/7575844/}, + doi = {10.1109/FiCloud.2016.16}, + abstract = {We describe an approach for developing a campuswide sensor network using commodity single board computers. We sketch various use cases for environmental sensor data, for different university stakeholders. Our key premise is that supersensors—sensors with significant compute capability—enable more flexible data collection, processing and reaction. In this paper, we describe the initial prototype deployment of our supersensor system in a single department at the University of Glasgow.}, + language = {en}, + urldate = {2019-09-04}, + booktitle = {2016 {IEEE} 4th {International} {Conference} on {Future} {Internet} of {Things} and {Cloud} ({FiCloud})}, + publisher = {IEEE}, + author = {Hentschel, Kristian and Jacob, Dejice and Singer, Jeremy and Chalmers, Matthew}, + month = aug, + year = {2016}, + pages = {58--62}, + file = {Hentschel et al. - 2016 - Supersensors Raspberry Pi Devices for Smart Campu.pdf:/home/mrl/.local/share/zotero/storage/ATK53FN2/Hentschel et al. 
- 2016 - Supersensors Raspberry Pi Devices for Smart Campu.pdf:application/pdf}, +} diff --git a/appendix/clean_for_haskell_programmers.tex b/appendix/clean_for_haskell_programmers.tex index ad0b06d..521e2d6 100644 --- a/appendix/clean_for_haskell_programmers.tex +++ b/appendix/clean_for_haskell_programmers.tex @@ -20,7 +20,7 @@ \maketitle \tableofcontents }{ - \chapter{\glsentrytext{CLEAN} for \glsentrytext{HASKELL} Programmers}% + \chapter{\texorpdfstring{\glsentrytext{CLEAN}}{Clean} for \texorpdfstring{\glsentrytext{HASKELL}}{Haskell} Programmers}% \label{chp:clean_for_haskell_programmers} } @@ -100,7 +100,7 @@ For example, defining a generic equality is done as in \cref{lst:generic_eq}. Metadata about the types is available using the \cleaninline{of} syntax that gives the function access to metadata records, as can be seen in \cref{lst:generic_print} showing a generic print function. This abundance of metadata allows for very complex generic functions that near the expression level of template metaprogramming\ifSubfilesClassLoaded{}{ (See \cref{chp:first-class_datatypes})}. \lstinputlisting[language=Clean,firstline=4,label={lst:generic_print},caption={Generic print function in \gls{CLEAN}.}]{lst/generic_print.icl} -\subsection{\glsentrytext{GADT}s} +\subsection{\texorpdfstring{\glsentrytext{GADT}}{GADT}s} GADTs are enriched data types that allow the type instantiation of the constructor to be explicitly defined~\cite{cheney_first-class_2003,hinze_fun_2003}. While \glspl{GADT} are not natively supported in \gls{CLEAN}, they can be simulated using embedding-projection pairs or equivalence types~\cite[Sec.~2.2]{cheney_lightweight_2002}. To illustrate this, \cref{lst:gadt_clean} shows an example \gls{GADT} that would be implemented in \gls{HASKELL} as done in \cref{lst:gadt_haskell}\requiresGHCmod{GADTs}. 
diff --git a/domain_specific_languages/class_deep_embedding.tex b/conclusion/conclusion.tex similarity index 64% rename from domain_specific_languages/class_deep_embedding.tex rename to conclusion/conclusion.tex index 3a60dc8..568d51f 100644 --- a/domain_specific_languages/class_deep_embedding.tex +++ b/conclusion/conclusion.tex @@ -5,9 +5,8 @@ \pagenumbering{arabic} }{} -\chapter{Deep embedding with class}% -\label{chp:classy deep embedding} -TFP22 pearl. +\chapter{Conclusion}% +\label{chp:conclusion} \input{subfilepostamble} \end{document} diff --git a/domain-specific_languages/class_deep_embedding.tex b/domain-specific_languages/class_deep_embedding.tex new file mode 100644 index 0000000..3e43cbb --- /dev/null +++ b/domain-specific_languages/class_deep_embedding.tex @@ -0,0 +1,809 @@ +\documentclass[../thesis.tex]{subfiles} + +\begin{document} +\ifSubfilesClassLoaded{ + \pagenumbering{arabic} +}{} + +\chapter{Deep embedding with class}% +\label{chp:classy_deep_embedding} + +\begin{quote} + The two flavours of DSL embedding are shallow and deep embedding. + In functional languages, shallow embedding models the language constructs as functions in which the semantics are embedded. + Adding semantics is therefore cumbersome while adding constructs is a breeze. + Upgrading the functions to type classes lifts this limitation to a certain extent. + + Deeply embedded languages represent their language constructs as data and the semantics are functions on it. + As a result, the language constructs are embedded in the semantics, hence adding new language constructs is laborious where adding semantics is trouble free. + + This paper shows that by abstracting the semantics functions in deep embedding to type classes, it is possible to easily add language constructs as well. + So-called classy deep embedding results in DSLs that are extensible both in language constructs and in semantics while maintaining a concrete abstract syntax tree. 
+ Additionally, little type-level trickery or complicated boilerplate code is required to achieve this. +\end{quote} + +\section{Introduction}% +The two flavours of DSL embedding are deep and shallow embedding~\cite{boulton_experience_1992}. +In functional programming languages, shallow embedding models language constructs as functions in the host language. +As a result, adding new language constructs---extra functions---is easy. +However, the semantics of the language is embedded in these functions, making it troublesome to add semantics since it requires updating all existing language constructs. + +On the other hand, deep embedding models language constructs as data in the host language. +The semantics of the language are represented by functions over the data. +Consequently, adding new semantics, i.e.\ novel functions, is straightforward. +It can be stated that the language constructs are embedded in the functions that form a semantics. +If one wants to add a language construct, all semantics functions must be revisited and revised to avoid ending up with partial functions. + +This juxtaposition has been known for many years~\cite{reynolds_user-defined_1978} and discussed by many others~\cite{krishnamurthi_synthesizing_1998} but most famously dubbed the \emph{expression problem} by Wadler~\cite{wadler_expression_1998}: + +\begin{quote} + The \emph{expression problem} is a new name for an old problem. + The goal is to define a data type by cases, where one can add new cases to the data type and new functions over the data type, without recompiling existing code, and while retaining static type safety (e.g., no casts). +\end{quote} + +In shallow embedding, abstracting the functions to type classes disentangles the language constructs from the semantics, allowing extension both ways. +This technique is dubbed tagless-final embedding~\cite{carette_finally_2009}, nonetheless it is no silver bullet. 
+Some semantics that require an intensional analysis of the syntax tree, such as transformation and optimisations, are difficult to implement in shallow embedding due to the lack of an explicit data structure representing the abstract syntax tree. +The semantics of the DSL have to be combined and must hold some kind of state or context, so that structural information is not lost~\cite{kiselyov_typed_2012}. + +\subsection{Research contribution} +This paper shows how to apply the technique observed in tagless-final embedding to deep embedding. +The presented basic technique, christened \emph{classy deep embedding}, does not require advanced type system extensions to be used. +However, it is suitable for type system extensions such as generalised algebraic data types. +While this paper is written as a literate +Haskell~\cite{peyton_jones_haskell_2003} program using some minor extensions provided by GHC~\cite{ghc_team_ghc_2021}, the idea is applicable to other languages as well\footnotemark{}. +\footnotetext{Lubbers, M. (2021): Literate Haskell/lhs2\TeX{} source code of the paper ``Deep Embedding +with Class'': TFP 2022.\ DANS.\ \url{https://doi.org/10.5281/zenodo.5081386}.} + +\section{Deep embedding}% + +Pick a DSL, any DSL, pick the language of literal integers and addition. +In deep embedding, terms in the language are represented by data in the host language. +Hence, defining the constructs is as simple as creating the following algebraic data type\footnote{All data types and functions are subscripted to indicate the evolution.}. + +\begin{lstHaskellLhstex} +data Expr_0 = Lit_0 Int + | Add_0 Expr_0 Expr_0 +\end{lstHaskellLhstex} + +Semantics are defined as functions on the \haskelllhstexinline{Expr_0} data type. +For example, a function transforming the term to an integer---an evaluator---is implemented as follows. 
+ +\begin{lstHaskellLhstex} +eval_0 :: Expr_0 -> Int +eval_0 (Lit_0 e) = e +eval_0 (Add_0 e1 e2) = eval_0 e1 + eval_0 e2 +\end{lstHaskellLhstex} + +Adding semantics---e.g.\ a printer---just means adding another function while the existing functions remain untouched. +I.e.\ the key property of deep embedding. +The following function, transforming the \haskelllhstexinline{Expr_0} data type to a string, defines a simple printer for our language. + +\begin{lstHaskellLhstex} +print_0 :: Expr_0 -> String +print_0 (Lit_0 v) = show v +print_0 (Add_0 e1 e2) = "(" ++ print_0 e1 ++ "-" ++ print_0 e2 ++ ")" +\end{lstHaskellLhstex} + +While the language is concise and elegant, it is not very expressive. +Traditionally, extending the language is achieved by adding a case to the \haskelllhstexinline{Expr_0} data type. +So, adding subtraction to the language results in the following revised data type. + +\begin{lstHaskellLhstex} +data Expr_0 = Lit_0 Int + | Add_0 Expr_0 Expr_0 + | Sub_0 Expr_0 Expr_0 +\end{lstHaskellLhstex} + +Extending the DSL with language constructs exposes the Achilles' heel of deep embedding. +Adding a case to the data type means that all semantics functions have become partial and need to be updated to be able to handle this new case. +This does not seem like an insurmountable problem, but it does pose a problem if either the functions or the data type itself are written by others or are contained in a closed library. + +\section{Shallow embedding}% + +Conversely, let us see how this would be done in shallow embedding. +First, the data type is represented by functions in the host language with embedded semantics. +Therefore, the evaluators for literals and addition both become a function in the host language as follows. + +\begin{lstHaskellLhstex} +type Sem_s = Int + +lit_s :: Int -> Sem_s +lit_s i = i + +add_s :: Sem_s -> Sem_s -> Sem_s +add_s e1 e2 = e1 + e2 +\end{lstHaskellLhstex} + +Adding constructions to the language is done by adding functions. 
+Hence, the following function adds subtraction to our language. + +\begin{lstHaskellLhstex} +sub_s :: Sem_s -> Sem_s -> Sem_s +sub_s e1 e2 = e1 - e2 +\end{lstHaskellLhstex} + +Adding semantics on the other hand---e.g.\ a printer---is not that simple because the semantics are part of the functions representing the language constructs. +One way to add semantics is to change all functions to execute both semantics at the same time. +In our case this means changing the type of \haskelllhstexinline{Sem_s} to be \haskellinline{(Int, String)} so that all functions operate on a tuple containing the result of the evaluator and the printed representation at the same time. %chktex 36 +Alternatively, a single semantics can be defined that represents a fold over the language constructs~\cite{gibbons_folding_2014}, delaying the selection of semantics to the moment the fold is applied. + +\subsection{Tagless-final embedding} +Tagless-final embedding overcomes the limitations of standard shallow embedding. +To upgrade to this embedding technique, the language constructs are changed from functions to type classes. +For our language this results in the following type class definition. + +\begin{lstHaskellLhstex} +class Expr_t s where + lit_t :: Int -> s + add_t :: s -> s -> s +\end{lstHaskellLhstex} + +Semantics become data types\footnotemark{} implementing these type classes, resulting in the following instance for the evaluator. +\footnotetext{% + In this case \haskelllhstexinline{newtype}s are used instead of regular \haskellinline{data} declarations. + A \haskelllhstexinline{newtype} is a special data type with a single constructor containing a single value only to which it is isomorphic. + It allows the programmer to define separate class instances that the instances of the isomorphic type without any overhead. + During compilation the constructor is completely removed~\cite[Sec.~4.2.3]{peyton_jones_haskell_2003}. 
+} + +\begin{lstHaskellLhstex} +newtype Eval_t = E_t Int + +instance Expr_t Eval_t where + lit_t v = E_t v + add_t (E_t e1) (E_t e2) = E_t (e1 + e2) +\end{lstHaskellLhstex} + +Adding constructs---e.g.\ subtraction---just results in an extra type class and corresponding instances. + +\begin{lstHaskellLhstex} +class Sub_t s where + sub_t :: s -> s -> s + +instance Sub_t Eval_t where + sub_t (E_t e1) (E_t e2) = E_t (e1 - e2) +\end{lstHaskellLhstex} + +Finally, adding semantics such as a printer over the language is achieved by providing a data type representing the semantics accompanied by instances for the language constructs. + +\begin{lstHaskellLhstex} +newtype Printer_t = P_t String + +instance Expr_t Printer_t where + lit_t i = P_t (show i) + add_t (P_t e1) (P_t e2) = P_t ("(" ++ e1 ++ "+" ++ e2 ++ ")") + +instance Sub_t Printer_t where + sub_t (P_t e1) (P_t e2) = P_t ("(" ++ e1 ++ "-" ++ e2 ++ ")") +\end{lstHaskellLhstex} + +\section{Lifting the backends}% + +Let us rethink the deeply embedded DSL design. +Remember that in shallow embedding, the semantics are embedded in the language construct functions. +Obtaining extensibility both in constructs and semantics was accomplished by abstracting the semantics functions to type classes, making the constructs overloaded in the semantics. +In deep embedding, the constructs are embedded in the semantics functions instead. +So, let us apply the same technique, i.e.\ make the semantics overloaded in the language constructs by abstracting the semantics functions to type classes. +The same effect may be achieved when using similar techniques such as explicit dictionary passing or ML style modules. +In our language this results in the following type class. + +\begin{lstHaskellLhstex} +class Eval_1 v where + eval_1 :: v -> Int + +data Expr_1 = Lit_1 Int + | Add_1 Expr_1 Expr_1 +\end{lstHaskellLhstex} + +Implementing the semantics type class instances for the \haskelllhstexinline{Expr_1} data type is an elementary exercise. 
+By a copy-paste and some modifications, we come to the following implementation. + +\begin{lstHaskellLhstex} +instance Eval_1 Expr_1 where + eval_1 (Lit_1 v) = v + eval_1 (Add_1 e1 e2) = eval_1 e1 + eval_1 e2 +\end{lstHaskellLhstex} + +Subtraction can now be defined in a separate data type, leaving the original data type intact. +Instances for the additional semantics can now be implemented separately as instances of the type classes. + +\begin{lstHaskellLhstex} +data Sub_1 = Sub_1 Expr_1 Expr_1 + +instance Eval_1 Sub_1 where + eval_1 (Sub_1 e1 e2) = eval_1 e1 - eval_1 e2 +\end{lstHaskellLhstex} + +\section{Existential data types}% + +The astute reader might have noticed that we have dissociated ourselves from the original data type. +It is only possible to create an expression with a subtraction on the top level. +The recursive knot is left untied and as a result, \haskelllhstexinline{Sub_1} can never be reached from an \haskellinline{Expr_1}. + +Luckily, we can reconnect them by adding a special constructor to the \haskelllhstexinline{Expr_1} data type for housing extensions. +It contains an existentially quantified~\cite{mitchell_abstract_1988} type with type class constraints~\cite{laufer_combining_1994,laufer_type_1996} for all semantics type classes~\cite[Chp.~6.4.6]{ghc_team_ghc_2021} to allow it to house not just subtraction but any future extension. + +\begin{lstHaskellLhstex} +data Expr_2 = Lit_2 Int + | Add_2 Expr_2 Expr_2 + | forall x. Eval_2 x => Ext_2 x +\end{lstHaskellLhstex} + +The implementation of the extension case in the semantics type classes is in most cases just a matter of calling the function for the argument as can be seen in the semantics instances shown below. 
+ +\begin{lstHaskellLhstex} +instance Eval_2 Expr_2 where + eval_2 (Lit_2 v) = v + eval_2 (Add_2 e1 e2) = eval_2 e1 + eval_2 e2 + eval_2 (Ext_2 x) = eval_2 x +\end{lstHaskellLhstex} + +Adding language construct extensions in different data types does mean that an extra \haskelllhstexinline{Ext_2} tag is introduced when using the extension. +This burden can be relieved by creating a smart constructor for it that automatically wraps the extension with the \haskelllhstexinline{Ext_2} constructor so that it is of the type of the main data type. + +\begin{lstHaskellLhstex} +sub_2 :: Expr_2 -> Expr_2 -> Expr_2 +sub_2 e1 e2 = Ext_2 (Sub_2 e1 e2) +\end{lstHaskellLhstex} + +In our example this means that the programmer can write\footnotemark{}: +\footnotetext{% + Backticks are used to use functions or constructors in an infix fashion~\cite[Sec.~4.3.3]{peyton_jones_haskell_2003}. +} +\begin{lstHaskellLhstex} +e2 :: Expr_2 +e2 = Lit_2 42 `sub_2` Lit_2 1 +\end{lstHaskellLhstex} +instead of having to write +\begin{lstHaskellLhstex} +e2p :: Expr_2 +e2p = Ext_2 (Lit_2 42 `Sub_2` Lit_2 1) +\end{lstHaskellLhstex} + +\subsection{Unbraiding the semantics from the data} +This approach does reveal a minor problem. +Namely, that all semantics type classes are braided into our datatypes via the \haskelllhstexinline{Ext_2} constructor. +Say if we add the printer again, the \haskelllhstexinline{Ext_2} constructor has to be modified to contain the printer type class constraint as well\footnote{Resulting in the following constructor: \haskellinline{forall x.(Eval_2 x, Print_2 x) => Ext_2 x}.}. %chktex 36 +Thus, if we add semantics, the main data type's type class constraints in the \haskelllhstexinline{Ext_2} constructor need to be updated. +To avoid this, the type classes can be bundled in a type class alias or type class collection as follows. + +\begin{lstHaskellLhstex} +class (Eval_2 x, Print_2 x) => Semantics_2 x + +data Expr_2 = Lit_2 Int + | Add_2 Expr_2 Expr_2 + | forall x. 
Semantics_2 x => Ext_2 x +\end{lstHaskellLhstex} + +The class alias removes the need for the programmer to visit the main data type when adding additional semantics. +Unfortunately, the compiler does need to visit the main data type again. +Some may argue that adding semantics happens less frequently than adding language constructs but in reality it means that we have to concede that the language is not as easily extensible in semantics as in language constructs. +More exotic type system extensions such as constraint kinds~\cite{bolingbroke_constraint_2011,yorgey_giving_2012} can untangle the semantics from the data types by making the data types parametrised by the particular semantics. +However, by adding some boilerplate, even without this extension, the language constructs can be parametrised by the semantics by putting the semantics functions in a data type. +First the data types for the language constructs are parametrised by the type variable \haskelllhstexinline{d} as follows. + +\begin{lstHaskellLhstex} +data Expr_3 d = Lit_3 Int + | Add_3 (Expr_3 d) (Expr_3 d) + | forall x. Ext_3 (d x) x +\end{lstHaskellLhstex} + +\begin{lstHaskellLhstex} +data Sub_3 d = Sub_3 (Expr_3 d) (Expr_3 d) +\end{lstHaskellLhstex} + +The \haskelllhstexinline{d} type variable is inhabited by an explicit dictionary for the semantics, i.e.\ a witness to the class instance. +Therefore, for all semantics type classes, a data type is made that contains the semantics function for the given semantics. +This means that for \haskelllhstexinline{Eval_3}, a dictionary with the function \haskellinline{EvalDict_3} is defined, a type class \haskellinline{HasEval_3} for retrieving the function from the dictionary and an instance for \haskellinline{HasEval_3} for \haskellinline{EvalDict_3}. 
+ +\begin{lstHaskellLhstex} +newtype EvalDict_3 v = EvalDict_3 (v -> Int) + +class HasEval_3 d where + getEval_3 :: d v -> v -> Int + +instance HasEval_3 EvalDict_3 where + getEval_3 (EvalDict_3 e) = e +\end{lstHaskellLhstex} + +The instances for the type classes change as well according to the change in the datatype. +Given that there is a \haskelllhstexinline{HasEval_3} instance for the witness type \haskellinline{d}, we can provide an implementation of \haskellinline{Eval_3} for \haskellinline{Expr_3 d}. + +\begin{lstHaskellLhstex} +instance HasEval_3 d => Eval_3 (Expr_3 d) where + eval_3 (Lit_3 v) = v + eval_3 (Add_3 e1 e2) = eval_3 e1 + eval_3 e2 + eval_3 (Ext_3 d x) = getEval_3 d x + +instance HasEval_3 d => Eval_3 (Sub_3 d) where + eval_3 (Sub_3 e1 e2) = eval_3 e1 - eval_3 e2 +\end{lstHaskellLhstex} + +Because the \haskelllhstexinline{Ext_3} constructor from \haskellinline{Expr_3} now contains a value of type \haskellinline{d}, the smart constructor for \haskellinline{Sub_3} must somehow come up with this value. +To achieve this, a type class is introduced that allows the generation of such a dictionary. + +\begin{lstHaskellLhstex} +class GDict a where + gdict :: a +\end{lstHaskellLhstex} + +This type class has individual instances for all semantics dictionaries, linking the class instance to the witness value. +I.e.\ if there is a type class instance known, a witness value can be conjured using the \haskelllhstexinline{gdict} function. 
+ +\begin{lstHaskellLhstex} +instance Eval_3 v => GDict (EvalDict_3 v) where + gdict = EvalDict_3 eval_3 +\end{lstHaskellLhstex} + +With these instances, the semantics function can be retrieved from the \haskelllhstexinline{Ext_3} constructor and in the smart constructors they can be generated as follows: + +\begin{lstHaskellLhstex} +sub_3 :: GDict (d (Sub_3 d)) => Expr_3 d -> Expr_3 d -> Expr_3 d +sub_3 e1 e2 = Ext_3 gdict (Sub_3 e1 e2) +\end{lstHaskellLhstex} + +Finally, we reached the end goal, orthogonal extension of both language constructs as shown by adding subtraction to the language and in language semantics. +Adding the printer can now be done without touching the original code as follows. +First the printer type class, dictionaries and instances for \haskelllhstexinline{GDict} are defined. + +\begin{lstHaskellLhstex} +class Print_3 v where + print_3 :: v -> String + +newtype PrintDict_3 v = PrintDict_3 (v -> String) + +class HasPrint_3 d where + getPrint_3 :: d v -> v -> String + +instance HasPrint_3 PrintDict_3 where + getPrint_3 (PrintDict_3 e) = e + +instance Print_3 v => GDict (PrintDict_3 v) where + gdict = PrintDict_3 print_3 +\end{lstHaskellLhstex} + +Then the instances for \haskelllhstexinline{Print_3} of all the language constructs can be defined. + +\begin{lstHaskellLhstex} +instance HasPrint_3 d => Print_3 (Expr_3 d) where + print_3 (Lit_3 v) = show v + print_3 (Add_3 e1 e2) = "(" ++ print_3 e1 ++ "+" ++ print_3 e2 ++ ")" + print_3 (Ext_3 d x) = getPrint_3 d x +instance HasPrint_3 d => Print_3 (Sub_3 d) where + print_3 (Sub_3 e1 e2) = "(" ++ print_3 e1 ++ "-" ++ print_3 e2 ++ ")" +\end{lstHaskellLhstex} + +\section{Transformation semantics}% + +Most semantics convert a term to some final representation and can be expressed just by functions on the cases. +However, the implementation of semantics such as transformation or optimisation may benefit from a so-called intentional analysis of the abstract syntax tree. 
+In shallow embedding, the implementation for these types of semantics is difficult because there is no tangible abstract syntax tree. +In off-the-shelf deep embedding this is effortless since the function can pattern match on the constructor or structures of constructors. + +To demonstrate intensional analyses in classy deep embedding we write an optimizer that removes addition and subtraction by zero. +In classy deep embedding, adding new semantics means first adding a new type class housing the function including the machinery for the extension constructor. + +\begin{lstHaskellLhstex} +class Opt_3 v where + opt_3 :: v -> v + +newtype OptDict_3 v = OptDict_3 (v -> v) + +class HasOpt_3 d where + getOpt_3 :: d v -> v -> v + +instance HasOpt_3 OptDict_3 where + getOpt_3 (OptDict_3 e) = e + +instance Opt_3 v => GDict (OptDict_3 v) where + gdict = OptDict_3 opt_3 +\end{lstHaskellLhstex} + +The implementation of the optimizer for the \haskelllhstexinline{Expr_3} data type is no complicated task. +The only interesting bit occurs in the \haskelllhstexinline{Add_3} constructor, where we pattern match on the optimised children to determine whether an addition with zero is performed. +If this is the case, the addition is removed. + +\begin{lstHaskellLhstex} +instance HasOpt_3 d => Opt_3 (Expr_3 d) where + opt_3 (Lit_3 v) = Lit_3 v + opt_3 (Add_3 e1 e2) = case (opt_3 e1, opt_3 e2) of + (Lit_3 0, e2p ) -> e2p + (e1p, Lit_3 0) -> e1p + (e1p, e2p ) -> Add_3 e1p e2p + opt_3 (Ext_3 d x) = Ext_3 d (getOpt_3 d x) +\end{lstHaskellLhstex} + +Replicating this for the \haskelllhstexinline{Opt_3} instance of \haskellinline{Sub_3} seems a clear-cut task at first glance. + +\begin{lstHaskellLhstex} +instance HasOpt_3 d => Opt_3 (Sub_3 d) where + opt_3 (Sub_3 e1 e2) = case (opt_3 e1, opt_3 e2) of + (e1p, Lit_3 0) -> e1p + (e1p, e2p ) -> Sub_3 e1p e2p +\end{lstHaskellLhstex} + +Unsurprisingly, this code is rejected by the compiler. 
+When a literal zero is matched as the right-hand side of a subtraction, the left-hand side of type \haskelllhstexinline{Expr_3} is returned. +However, the type signature of the function dictates that it should be of type \haskelllhstexinline{Sub_3}. +To overcome this problem we add a convolution constructor. + +\subsection{Convolution}% + +Adding a loopback case or convolution constructor to \haskelllhstexinline{Sub_3} allows the removal of the \haskellinline{Sub_3} constructor while remaining the \haskellinline{Sub_3} type. +It should be noted that a loopback case is \emph{only} required if the transformation actually removes tags. +This changes the \haskelllhstexinline{Sub_3} data type as follows. + +\begin{lstHaskellLhstex} +data Sub_4 d = Sub_4 (Expr_4 d) (Expr_4 d) + | SubLoop_4 (Expr_4 d) + +instance HasEval_4 d => Eval_4 (Sub_4 d) where + eval_4 (Sub_4 e1 e2) = eval_4 e1 - eval_4 e2 + eval_4 (SubLoop_4 e1) = eval_4 e1 +\end{lstHaskellLhstex} + +With this loopback case in the toolbox, the following \haskelllhstexinline{Sub} instance optimises away subtraction with zero literals. + +\begin{lstHaskellLhstex} +instance HasOpt_4 d => Opt_4 (Sub_4 d) where + opt_4 (Sub_4 e1 e2) = case (opt_4 e1, opt_4 e2) of + (e1p, Lit_4 0) -> SubLoop_4 e1p + (e1p, e2p ) -> Sub_4 e1p e2p + opt_4 (SubLoop_4 e) = SubLoop_4 (opt_4 e) +\end{lstHaskellLhstex} + +\subsection{Pattern matching}% + +Pattern matching within datatypes and from an extension to the main data type works out of the box. +Cross-extensional pattern matching on the other hand---matching on a particular extension---is something that requires a bit of extra care. +Take for example negation propagation and double negation elimination. +Pattern matching on values with an existential type is not possible without leveraging dynamic typing~\cite{abadi_dynamic_1991,baars_typing_2002}. 
+To enable dynamic typing support, the \haskelllhstexinline{Typeable} type class as provided by \haskellinline{Data.Dynamic}~\cite{ghc_team_datadynamic_2021} is added to the list of constraints in all places where we need to pattern match across extensions. +As a result, the \haskelllhstexinline{Typeable} type class constraints are added to the quantified type variable \haskellinline{x} of the \haskellinline{Ext_4} constructor and to \haskellinline{d}s in the smart constructors. + +\begin{lstHaskellLhstex} +data Expr_4 d = Lit_4 Int + | Add_4 (Expr_4 d) (Expr_4 d) + | forall x. Typeable x => Ext_4 (d x) x +\end{lstHaskellLhstex} + +First let us add negation to the language by defining a datatype representing it. +Negation elimination requires the removal of negation constructors, so a convolution constructor is defined as well. + +\begin{lstHaskellLhstex} +data Neg_4 d = Neg_4 (Expr_4 d) + | NegLoop_4 (Expr_4 d) + +neg_4 :: (Typeable d, GDict (d (Neg_4 d))) => Expr_4 d -> Expr_4 d +neg_4 e = Ext_4 gdict (Neg_4 e) +\end{lstHaskellLhstex} + +The evaluation and printer instances for the \haskelllhstexinline{Neg_4} datatype are defined as follows. + +\begin{lstHaskellLhstex} +instance HasEval_4 d => Eval_4 (Neg_4 d) where + eval_4 (Neg_4 e) = negate (eval_4 e) + eval_4 (NegLoop_4 e) = eval_4 e + +instance HasPrint_4 d => Print_4 (Neg_4 d) where + print_4 (Neg_4 e) = "(~" ++ print_4 e ++ ")" + print_4 (NegLoop_4 e) = print_4 e +\end{lstHaskellLhstex} + +The \haskelllhstexinline{Opt_4} instance contains the interesting bit. +If the sub expression of a negation is an addition, negation is propagated downwards. +If the sub expression is again a negation, something that can only be found out by a dynamic pattern match, it is replaced by a \haskelllhstexinline{NegLoop_4} constructor. 
+ +\begin{lstHaskellLhstex} +instance (Typeable d, GDict (d (Neg_4 d)), HasOpt_4 d) => Opt_4 (Neg_4 d) where + opt_4 (Neg_4 (Add_4 e1 e2)) = NegLoop_4 (Add_4 (opt_4 (neg_4 e1)) (opt_4 (neg_4 e2))) + opt_4 (Neg_4 (Ext_4 d x)) = case fromDynamic (toDyn (getOpt_4 d x)) of + Just (Neg_4 e) -> NegLoop_4 e + _ -> Neg_4 (Ext_4 d (getOpt_4 d x)) + opt_4 (Neg_4 e) = Neg_4 (opt_4 e) + opt_4 (NegLoop_4 e) = NegLoop_4 (opt_4 e) +\end{lstHaskellLhstex} + +Loopback cases do make cross-extensional pattern matching less modular in general. +For example, \haskelllhstexinline{Ext_4 d (SubLoop_4 (Lit_4 0))} is equivalent to \haskellinline{Lit_4 0} in the optimisation semantics and would require an extra pattern match. +Fortunately, this problem can be mitigated---if required---by just introducing an additional optimisation semantics that removes loopback cases. +Luckily, one does not need to resort to these arguably blunt matters often. +Dependent language functionality often does not need to span extensions, i.e.\ it is possible to group them in the same data type. + +\subsection{Chaining semantics} +Now that the data types are parametrised by the semantics a final problem needs to be overcome. +The data type is parametrised by the semantics, thus, using multiple semantics, such as evaluation after optimising is not straightforwardly possible. +Luckily, a solution is readily at hand: introduce an ad-hoc combination semantics. + +\begin{lstHaskellLhstex} +data OptPrintDict_4 v = OPD_4 (OptDict_4 v) (PrintDict_4 v) + +instance HasOpt_4 OptPrintDict_4 where + getOpt_4 (OPD_4 v _) = getOpt_4 v +instance HasPrint_4 OptPrintDict_4 where + getPrint_4 (OPD_4 _ v) = getPrint_4 v + +instance (Opt_4 v, Print_4 v) => GDict (OptPrintDict_4 v) where + gdict = OPD_4 gdict gdict +\end{lstHaskellLhstex} + +And this allows us to write \haskelllhstexinline{print_4 (opt_4 e1)} resulting in \verb|"((~42)+(~38))"| when \haskellinline{e1} represents $(\sim(42+38))-0$ and is thus defined as follows. 
+ +\begin{lstHaskellLhstex} +e1 :: Expr_4 OptPrintDict_4 +e1 = neg_4 (Lit_4 42 `Add_4` Lit_4 38) `sub_4` Lit_4 0 +\end{lstHaskellLhstex} + +When using classy deep embedding to the fullest, the ability of the compiler to infer very general types expires. +As a consequence, defining reusable expressions that are overloaded in their semantics requires quite some type class constraints that cannot be inferred by the compiler (yet) if they use many extensions. +Solving this remains future work. +For example, the expression $\sim(42-38)+1$ has to be defined as: + +\begin{lstHaskellLhstex} +e3 :: (Typeable d, GDict (d (Neg_4 d)), GDict (d (Sub_4 d))) => Expr_4 d +e3 = neg_4 (Lit_4 42 `sub_4` Lit_4 38) `Add_4` Lit_4 1 +\end{lstHaskellLhstex} + +\section{Generalised algebraic data types}% +Generalised algebraic data types (GADTs) are enriched data types that allow the type instantiation of the constructor to be explicitly defined~\cite{cheney_first-class_2003,hinze_fun_2003}. +Leveraging GADTs, deeply embedded DSLs can be made statically type safe even when different value types are supported. +Even when GADTs are not supported natively in the language, they can be simulated using embedding-projection pairs or equivalence types~\cite[Sec.~2.2]{cheney_lightweight_2002}. +Where some solutions to the expression problem do not easily generalise to GADTs (see \cref{sec:cde:related}), classy deep embedding does. +Generalising the data structure of our DSL is fairly straightforward and to spice things up a bit, we add an equality and boolean not language construct. +To make the existing DSL constructs more general, we relax the types of those constructors. +For example, operations on integers now work on all numerals instead. +Moreover, the \haskelllhstexinline{Lit_g} constructor can be used to lift values of any type to the DSL domain as long as they have a \haskellinline{Show} instance, required for the printer. 
+Since some optimisations on \haskelllhstexinline{Not_g} remove constructors and therefore use cross-extensional pattern matches, \haskellinline{Typeable} constraints are added to \haskellinline{a}. +Furthermore, because the optimisations for \haskelllhstexinline{Add_g} and \haskellinline{Sub_g} are now more general---they do not only work for \haskellinline{Int}s but for any type with a \haskellinline{Num} instance---the \haskellinline{Eq} constraint is added to these constructors as well. +Finally, not to repeat ourselves too much, we only show the parts that substantially changed. +The omitted definitions and implementation can be found in \cref{sec:cde:appendix}. + +\begin{lstHaskellLhstex} +data Expr_g d a where + Lit_g :: Show a => a -> Expr_g d a + Add_g :: (Eq a, Num a) => Expr_g d a -> Expr_g d a -> Expr_g d a + Ext_g :: Typeable x => d x -> x a -> Expr_g d a +data Neg_g d a where + Neg_g :: (Typeable a, Num a) => Expr_g d a -> Neg_g d a + NegLoop_g :: Expr_g d a -> Neg_g d a +data Not_g d a where + Not_g :: Expr_g d Bool -> Not_g d Bool + NotLoop_g :: Expr_g d a -> Not_g d a +\end{lstHaskellLhstex} + +The smart constructors for the language extensions inherit the class constraints of their data types and include a \haskelllhstexinline{Typeable} constraint on the \haskellinline{d} type variable for it to be usable in the \haskellinline{Ext_g} constructor as can be seen in the smart constructor for \haskellinline{Neg_g}: + +\begin{lstHaskellLhstex} +neg_g :: (Typeable d, GDict (d (Neg_g d)), Typeable a, Num a) => Expr_g d a -> Expr_g d a +neg_g e = Ext_g gdict (Neg_g e) + +not_g :: (Typeable d, GDict (d (Not_g d))) => Expr_g d Bool -> Expr_g d Bool +not_g e = Ext_g gdict (Not_g e) +\end{lstHaskellLhstex} + +Upgrading the semantics type classes to support GADTs is done by an easy textual search and replace.
+All occurrences of \haskelllhstexinline{v} are now parametrised by type variable \haskellinline{a}: + +\begin{lstHaskellLhstex} +class Eval_g v where + eval_g :: v a -> a +class Print_g v where + print_g :: v a -> String +class Opt_g v where + opt_g :: v a -> v a +\end{lstHaskellLhstex} + +Now that the shape of the type classes has changed, the dictionary data types and the type classes need to be adapted as well. +The introduced type variable \haskelllhstexinline{a} is not an argument to the type class, so it should not be an argument to the dictionary data type. +To represent this type class function, a rank-2 polymorphic function is needed~\cite[Chp.~6.4.15]{ghc_team_ghc_2021}\cite{odersky_putting_1996}. +Concretely, for the evaluator this results in the following definitions: + +\begin{lstHaskellLhstex} +newtype EvalDict_g v = EvalDict_g (forall a. v a -> a) +class HasEval_g d where + getEval_g :: d v -> v a -> a +instance HasEval_g EvalDict_g where + getEval_g (EvalDict_g e) = e +\end{lstHaskellLhstex} + +The \haskelllhstexinline{GDict} type class is general enough, so the instances can remain the same.
+The \haskelllhstexinline{Eval_g} instance of \haskellinline{GDict} looks as follows: + +\begin{lstHaskellLhstex} +instance Eval_g v => GDict (EvalDict_g v) where + gdict = EvalDict_g eval_g +\end{lstHaskellLhstex} + +Finally, the implementations for the instances can be ported without complication, as shown using the optimisation instance of \haskelllhstexinline{Not_g}: + +\begin{lstHaskellLhstex} +instance (Typeable d, GDict (d (Not_g d)), HasOpt_g d) => Opt_g (Not_g d) where + opt_g (Not_g (Ext_g d x)) = case fromDynamic (toDyn (getOpt_g d x)) :: Maybe (Not_g d Bool) of + Just (Not_g e) -> NotLoop_g e + _ -> Not_g (Ext_g d (getOpt_g d x)) + opt_g (Not_g e) = Not_g (opt_g e) + opt_g (NotLoop_g e) = NotLoop_g (opt_g e) +\end{lstHaskellLhstex} + +\section{Conclusion}% + +Classy deep embedding is a novel organically grown embedding technique that alleviates deep embedding from the extensibility problem in most cases. + +By abstracting the semantics functions to type classes, they become overloaded in the language constructs. +This makes it possible to add new language constructs in a separate type. +These extensions are brought together in a special extension constructor residing in the main data type. +This extension case is overloaded by the language construct using a data type containing the class dictionary. +As a result, orthogonal extension is possible for language constructs and semantics using only little syntactic overhead or type annotations. +The basic technique only requires---well established through history and relatively standard---existential data types. +However, if needed, the technique generalises to GADTs as well, adding rank-2 types to the list of type system requirements. +Finally, the abstract syntax tree remains observable, which makes it suitable for intensional analyses, albeit using occasional dynamic typing for truly cross-extensional transformations.
+ +Defining reusable expressions overloaded in semantics or using multiple semantics on a single expression requires some boilerplate still; getting around this remains future work. + +\section{Related work}% +\label{sec:cde:related} + +Embedded DSL techniques in functional languages have been a topic of research for many years, thus we do not claim a complete overview of related work. + +Clearly, classy deep embedding bears most similarity to the \emph{Datatypes \`a la Carte} approach~\cite{swierstra_data_2008}. +In Swierstra's approach, semantics are lifted to type classes similarly to classy deep embedding. +Each language construct is its own datatype parametrised by a type parameter. +This parameter contains some type-level representation of language constructs that are in use. +In classy deep embedding, extensions do not have to be enumerated at the type level but are captured in the extension case. +Because all the constructs are expressed in the type system, nifty type system tricks need to be employed to convince the compiler that everything is type safe and the class constraints can be solved. +Furthermore, it requires some boilerplate code such as functor instances for the data types. +In return, pattern matching is easier and does not require dynamic typing. +Classy deep embedding only strains the programmer with writing the extension case for the main data type and the occasional loopback constructor. + +L\"oh and Hinze proposed a language extension that allows open data types and open functions, i.e.\ functions and data types that can be extended with more cases later on~\cite{loh_open_2006}. +They hinted at the possibility of using type classes for open functions but had serious concerns that pattern matching would be crippled because constructors are becoming types, thus ultimately becoming impossible to type.
+In contrast, this paper shows that pattern matching is easily attainable---albeit using dynamic types---and that the terms can be typed without complicated type system extensions. + +A technique similar to classy deep embedding was proposed by Najd and Peyton~Jones to tackle a slightly different problem, namely that of reusing a data type for multiple purposes in a slightly different form~\cite{najd_trees_2017}. +For example, to decorate the abstract syntax tree of a compiler differently for each phase of the compiler. +They propose to add an extension descriptor as a type variable to a data type and a type family that can be used to decorate constructors with extra information and add additional constructors to the data type using an extension constructor. +Classy deep embedding works similarly but uses existentially quantified type variables to describe possible extensions instead of type variables and type families. +In classy deep embedding, the extensions do not need to be encoded in the type system and less boilerplate is required. +Furthermore, pattern matching on extensions becomes a bit more complicated but in return it allows for multiple extensions to be added orthogonally and avoids the necessity for type system extensions. + +Tagless-final embedding is the shallowly embedded counterpart of classy deep embedding and was invented for the same purpose: overcoming the issues with standard shallow embedding~\cite{carette_finally_2009}. +Classy deep embedding was organically grown from observing the evolution of tagless-final embedding. +The main difference between tagless-final embedding and classy deep embedding---and in general between shallow and deep embedding---is that intensional analyses of the abstract syntax tree are more difficult because there is no tangible abstract syntax tree data structure. +In classy deep embedding, it is possible to define transformations even across extensions.
+ +Hybrid approaches between deep and shallow embedding exist as well. +For example, Svenningson et al.\ show that by expressing the deeply embedded language in a shallowly embedded core language, extensions can be made orthogonally as well~\cite{svenningsson_combining_2013}. +This paper differs from those approaches in the sense that it does not require a core language in which all extensions need to be expressible. + +\section*{Acknowledgements} +This research is partly funded by the Royal Netherlands Navy. +Furthermore, I would like to thank Pieter and Rinus for the fruitful discussions, Ralf for inspiring me to write a functional pearl, and the anonymous reviewers for their valuable and honest comments. + +%\appendix +\begin{subappendices} +\section{Data types and definitions}% +\label{sec:cde:appendix} +\begin{lstHaskellLhstex}[caption={Data type definitions.}] +data Sub_g d a where + Sub_g :: (Eq a, Num a) => Expr_g d a -> Expr_g d a -> Sub_g d a + SubLoop_g :: Expr_g d a -> Sub_g d a + +data Eq_g d a where + Eq_g :: (Typeable a, Eq a) => Expr_g d a -> Expr_g d a -> Eq_g d Bool + EqLoop_g :: Expr_g d a -> Eq_g d a +\end{lstHaskellLhstex} + +\begin{lstHaskellLhstex}[caption={Smart constructions.}] +sub_g :: (Typeable d, GDict (d (Sub_g d)), Eq a, Num a) => + Expr_g d a -> Expr_g d a -> Expr_g d a +sub_g e1 e2 = Ext_g gdict (Sub_g e1 e2) + +eq_g :: (Typeable d, GDict (d (Eq_g d)), Eq a, Typeable a) => + Expr_g d a -> Expr_g d a -> Expr_g d Bool +eq_g e1 e2 = Ext_g gdict (Eq_g e1 e2) +\end{lstHaskellLhstex} + +\begin{lstHaskellLhstex}[caption={Semantics classes and data types.}] +newtype PrintDict_g v = PrintDict_g (forall a.v a -> String) + +class HasPrint_g d where + getPrint_g :: d v -> v a -> String + +instance HasPrint_g PrintDict_g where + getPrint_g (PrintDict_g e) = e + +newtype OptDict_g v = OptDict_g (forall a.v a -> v a) + +class HasOpt_g d where + getOpt_g :: d v -> v a -> v a + +instance HasOpt_g OptDict_g where + getOpt_g (OptDict_g e) = e 
+\end{lstHaskellLhstex} + +\begin{lstHaskellLhstex}[caption={\texorpdfstring{\haskelllhstexinline{GDict}}{GDict} instances}] +instance Print_g v => GDict (PrintDict_g v) where + gdict = PrintDict_g print_g +instance Opt_g v => GDict (OptDict_g v) where + gdict = OptDict_g opt_g +\end{lstHaskellLhstex} + +\begin{lstHaskellLhstex}[caption={Evaluator instances}] +instance HasEval_g d => Eval_g (Expr_g d) where + eval_g (Lit_g v) = v + eval_g (Add_g e1 e2) = eval_g e1 + eval_g e2 + eval_g (Ext_g d x) = getEval_g d x + +instance HasEval_g d => Eval_g (Sub_g d) where + eval_g (Sub_g e1 e2) = eval_g e1 - eval_g e2 + eval_g (SubLoop_g e) = eval_g e + +instance HasEval_g d => Eval_g (Neg_g d) where + eval_g (Neg_g e) = negate (eval_g e) + eval_g (NegLoop_g e) = eval_g e + +instance HasEval_g d => Eval_g (Eq_g d) where + eval_g (Eq_g e1 e2) = eval_g e1 == eval_g e2 + eval_g (EqLoop_g e) = eval_g e + +instance HasEval_g d => Eval_g (Not_g d) where + eval_g (Not_g e) = not (eval_g e) + eval_g (NotLoop_g e) = eval_g e +\end{lstHaskellLhstex} + +\begin{lstHaskellLhstex}[caption={Printer instances}] +instance HasPrint_g d => Print_g (Expr_g d) where + print_g (Lit_g v) = show v + print_g (Add_g e1 e2) = "(" ++ print_g e1 ++ "+" ++ print_g e2 ++ ")" + print_g (Ext_g d x) = getPrint_g d x + +instance HasPrint_g d => Print_g (Sub_g d) where + print_g (Sub_g e1 e2) = "(" ++ print_g e1 ++ "-" ++ print_g e2 ++ ")" + print_g (SubLoop_g e) = print_g e + +instance HasPrint_g d => Print_g (Neg_g d) where + print_g (Neg_g e) = "(negate " ++ print_g e ++ ")" + print_g (NegLoop_g e) = print_g e + +instance HasPrint_g d => Print_g (Eq_g d) where + print_g (Eq_g e1 e2) = "(" ++ print_g e1 ++ "==" ++ print_g e2 ++ ")" + print_g (EqLoop_g e) = print_g e + +instance HasPrint_g d => Print_g (Not_g d) where + print_g (Not_g e) = "(not " ++ print_g e ++ ")" + print_g (NotLoop_g e) = print_g e +\end{lstHaskellLhstex} + +\begin{lstHaskellLhstex}[caption={Optimisation instances}] +instance HasOpt_g d => 
Opt_g (Expr_g d) where + opt_g (Lit_g v) = Lit_g v + opt_g (Add_g e1 e2) = case (opt_g e1, opt_g e2) of + (Lit_g 0, e2p ) -> e2p + (e1p, Lit_g 0) -> e1p + (e1p, e2p ) -> Add_g e1p e2p + opt_g (Ext_g d x) = Ext_g d (getOpt_g d x) + +instance HasOpt_g d => Opt_g (Sub_g d) where + opt_g (Sub_g e1 e2) = case (opt_g e1, opt_g e2) of + (e1p, Lit_g 0) -> SubLoop_g e1p + (e1p, e2p ) -> Sub_g e1p e2p + opt_g (SubLoop_g e) = SubLoop_g (opt_g e) + +instance (Typeable d, GDict (d (Neg_g d)), HasOpt_g d) => Opt_g (Neg_g d) where + opt_g (Neg_g (Add_g e1 e2)) = NegLoop_g (Add_g (opt_g (neg_g e1)) (opt_g (neg_g e2))) + opt_g (Neg_g (Ext_g d x)) = case fromDynamic (toDyn (getOpt_g d x)) of + Just (Neg_g e) -> NegLoop_g e + _ -> Neg_g (Ext_g d (getOpt_g d x)) + opt_g (Neg_g e) = Neg_g (opt_g e) + opt_g (NegLoop_g e) = NegLoop_g (opt_g e) + +instance HasOpt_g d => Opt_g (Eq_g d) where + opt_g (Eq_g e1 e2) = Eq_g (opt_g e1) (opt_g e2) + opt_g (EqLoop_g e) = EqLoop_g (opt_g e) +\end{lstHaskellLhstex} + +\end{subappendices} + +\input{subfilepostamble} +\end{document} diff --git a/domain_specific_languages/dsl_techniques.tex b/domain-specific_languages/dsl_techniques.tex similarity index 98% rename from domain_specific_languages/dsl_techniques.tex rename to domain-specific_languages/dsl_techniques.tex index 9e0b402..fae4ec3 100644 --- a/domain_specific_languages/dsl_techniques.tex +++ b/domain-specific_languages/dsl_techniques.tex @@ -5,7 +5,8 @@ \pagenumbering{arabic} }{} -\chapter{DSL embedding techniques} +\chapter{DSL embedding techniques}% +\label{chp:dsl_embedding_techniques} An \gls{EDSL} is a language embedded in a host language created for a specific domain\todo{citation needed?}. \glspl{EDSL} can have one or more backends or views. Commonly used views are pretty printing, compiling, simulating, verifying and proving the program. 
@@ -48,7 +49,7 @@ print (Eq l r) = "(" ++ print l ++ "==" ++ print r ++ ")" Adding a construct, for example subtraction reveals the Achilles' heel of deep embedding, namely that we need to revisit the original data type \emph{and} all the existing views. I.e.\ we need to add \haskellinline{| Sub Expr Expr} to the \haskellinline{Expr} data type and \haskellinline{print (Sub l r) = ...} to the \haskellinline{print} view. -This limitation can be overcome by lifting the views to classes (See \cref{chp:classy deep embedding}). +This limitation can be overcome by lifting the views to classes (See \cref{chp:classy_deep_embedding}). Implementing an evaluator for the language is possible without touching any original code, we just add a function operating on the \haskellinline{Expr} data type. To store variables, it has an extra environment argument\todo{cite while}. diff --git a/domain_specific_languages/first-class_datatypes.tex b/domain-specific_languages/first-class_datatypes.tex similarity index 100% rename from domain_specific_languages/first-class_datatypes.tex rename to domain-specific_languages/first-class_datatypes.tex diff --git a/domain-specific_languages/strongly-typed_multi-view_stack-based_computations.tex b/domain-specific_languages/strongly-typed_multi-view_stack-based_computations.tex new file mode 100644 index 0000000..2e186dc --- /dev/null +++ b/domain-specific_languages/strongly-typed_multi-view_stack-based_computations.tex @@ -0,0 +1,13 @@ +\documentclass[../thesis.tex]{subfiles} + +\begin{document} +\ifSubfilesClassLoaded{ + \pagenumbering{arabic} +}{} + +\chapter{Strongly-Typed Multi-View Stack-Based Computations}% +\label{chp:strongly-typed_multi-view_stack-based_computations} +TFP22 + +\input{subfilepostamble} +\end{document} diff --git a/frontmatter/dedication.tex b/frontmatter/dedication.tex index c462cb2..0e92967 100644 --- a/frontmatter/dedication.tex +++ b/frontmatter/dedication.tex @@ -8,7 +8,7 @@ \topskip0pt \vspace*{\fill} \begin{centering} 
- Voor Roos en Lotte + Voor Roos, Lotte en Elvira \end{centering} \vspace*{\fill}% diff --git a/frontmatter/motto.tex b/frontmatter/motto.tex index 2c2584e..1c96bbe 100644 --- a/frontmatter/motto.tex +++ b/frontmatter/motto.tex @@ -40,10 +40,9 @@ } \epigraph{% als ik dan van al dat schrijven\\ - toch wel wat ben afgemat\\ - dan ga ik lekker zitten lezen\\ - oh wat heerlijk lijkt mij dat - % TODO exacte quote opzoeken + toch wel erg ben afgemat\\ + ga ik lekker zitten lezen\\ + o, wat heerlijk lijkt mij dat }{% H.M. (Dick) Bruna } diff --git a/frontmatter/titlepage.tex b/frontmatter/titlepage.tex index db6bd08..1412d9f 100644 --- a/frontmatter/titlepage.tex +++ b/frontmatter/titlepage.tex @@ -84,7 +84,7 @@ \noindent% %\begin{minipage}[b][][b]{0.8\textwidth} % adapt widths of minipages to your needs -\begin{minipage}[b][][b]{0.9\textwidth} % adapt widths of minipages to your needs +\begin{minipage}[b][][b]{0.95\textwidth} % adapt widths of minipages to your needs { \setlength{\parindent}{0cm}% This research was partly funded by the Royal Netherlands Navy. 
@@ -110,7 +110,6 @@ \includegraphics[scale=.5]{cc} \includegraphics[scale=.5]{by} \includegraphics[scale=.5]{nd} - } \end{minipage}% %\hypersetup{pageanchor=true} diff --git a/glossaries.tex b/glossaries.tex index 0dd8fb5..fea90a3 100644 --- a/glossaries.tex +++ b/glossaries.tex @@ -5,6 +5,10 @@ \newacronym{GADT}{GADT}{generalised \acrshort{ADT}} \newacronym{GHC}{GHC}{Glasgow Haskell Compiler} \newacronym{GPL}{GPL}{general-purpose language} +\newacronym{PRS}{PRS}{python raspberry pi supersensor} +\newacronym{PWS}{PWS}{(micro)python wemos supersensor}%chktex 36 +\newacronym{CRS}{PRS}{clean raspberry pi supersensor} +\newacronym{CWS}{PWS}{clean wemos supersensor} \newacronym{FP}{FP}{functional programming} \newacronym{GRS}{GRS}{graph rewriting system} \newacronym{GUI}{GUI}{graphical \acrlong{UI}} @@ -28,11 +32,11 @@ % Glossaries \newglossaryentry{MTASK}{% name=mTask, - description={is a \acrshort{TOP} \acrshort{EDSL} for microcontrollers integrated with the \gls{ITASK} system.}, + description={is a \acrshort{TOP} \acrshort{EDSL} for microcontrollers integrated with the \gls{ITASK} system}, } \newglossaryentry{ITASK}{% name=iTask, - description={is a \acrshort{TOP} \acrshort{EDSL} for creating distributed multi-user collaborative web applications.}, + description={is a \acrshort{TOP} \acrshort{EDSL} for creating distributed multi-user collaborative web applications}, } \newglossaryentry{CLEAN}{% name=Clean, diff --git a/img/orcid.png b/img/orcid.png new file mode 100644 index 0000000000000000000000000000000000000000..d73bdcbe895a4eca294b91527b051ee4134fbb05 GIT binary patch literal 3410 zcmbVPcT^ME9-f3iLWEGH3lh2tqyQlyN=qh@(3=er2q8d(keCFd386@_pacX_5yXO^ zAS$9F&9VyWJ`t6oh^*2SDWb@-3cld#zW2`AvwyrfXXf5J^L^j1+~4n>Gr8WL>lKkY zNB{s787`o&bQCY$2s!EVwgdBlbkG*jHjDgt;i8>P0UK}*FqbfxcrIVc27q;r@q8vLiY-EivLiS#_J&h8S`E>hFnhyI z1P`1CpURHpxFibLeuB`TgeA)kT<8RSI{{%i8>&q7MVg)SeeuNwSBujJm&xV$OQg6s!0*-W3n9(4Q z70YGEh!~)~q4WzTj1xx25lJ{ao@j$3TH$O-cxm`HR6A<|-kNR;I+F;*pBjJ1wFRjJ 
zYbl&gClK*?8lFUSCebJ)k}aJ`p@KNOpIk0Yim$v86XhF7s6UPpt3wT`g_xzGMe`B9cC({2X zZUUVK67BF*TRaVC1A>M>xMBZK&ahHvuuH}9U&ZqCm9zqvrhk>bbn#d7uw$ehBapVn zrlaCc0FX~-fX@E$&);wJid5UAm1=!AhalNsu|a3dRN1nZB5@mczT8k;QExioJgu6s zUa9X`;%cp%)FfBvgpVW;ro;_(J6&vh&OQ>$C`Vp_SX1`c(q4uHRMmIRc?`6Z`ugU= z=N69kO~G%Q9*GVgol9z+`ZDm!RDusY?s%*c%U2(J=6WIKbMW+$<;h2_nl<`*rW@%O zRGXWgG!vUG?6GDtnVMRCyW|S?6fj8$jRuB(6r%cim*Gwlzy0LVWSbeSVWn6XbO zWm&x3C|>JcNmIxhz5_osSNW6~;jG|~xV#G3(PfZTcA^L8HgXBzr^_dL_}~kDfDd;x zwLjQNJPJ#c?{ZbrGwu)@lyz?pQKR)A9DLTJszY}lLF5^{PMc=zy-!+kzujQ@hKwlj zSD!yF?Hg)^#cMox-xu}-viV#Y!&&c@cg)~+L;d@Wvc|!^iWJh8%cJHGR|_6*08@woEjFUd2RloV@DL(7y9let9)a!cCIgZS z>#&ClCSbMTOoqFD$0l=Vm3q3-#bGSkG2pS0k?u7~dl}WCp6W{Co8%v8DC;`bR+aHp z_wvFz0aI<|GB{y&xy>_{^O|{Lm^#I~WAwb)%M;KjxkrL5YA5Z_rnYLyxE zteo-;NhllO=>8IOw&lg}y;u2WkBX`nA5LT!w+BCZ3{lO5PD;qx)Ryi6hqKT;QOaa- zvQPiQM`THGiBmwqO`~H$lpwE={K)t3c6%PzFZ$PfNy-cJZ7aTgo+5AAawE;M)pkfb z`(&=3_{t(XUB;dRuV2g~Y4YuA^gABWUpe9PWkSYzx|%scv9(LeNZ_EmL>ag5k%dr8 z0|_}FcMQKH8`t~fl4VNW=)lRSThD;0=CHwzS8gf8zA2x#24q#iE8HHNA4S`MU7goM#qFRm&jl>kvNj1^qO-}`V4n|bwf*%cFo zXbs@9T;RhhZv=00?Rd5vAD%IW%7$o81gA!duOAph+GeKl)%ln5bHtnxhV^Vsz4}$I z<6vs(Nw8a|=eIfAne@`dRh1B+=Zz0UO@{$=R@OB-1YfNgJ&O){*+&h^Pu*;<^Faj_T8%>|NE=%f~FPUsfZ>a<7zWNOzly$&v0(73| zXV$p|^#?SBV1Qa!bEoEu=57$!)}cdnMqPu$+l^?SbAUB6`=@2a^5v!vFYFTvp{q;? 
zjZP3)=booyZYbLJl>IEZ8jPtp5walHcvR8xUl-n2Gzd1j%C>_KLs9kkO& zz8p|6%UC6r%Tj#tvK-h)ZlkIJ6#?p;iA5z^~Sk?&kEO8Y5=C`dsL!zez`pXFBVPM zZB8@*be4GodpfeY%y{u7ubw;u$Y_ke3gSBO;6mKWP%Xz-t!Kv@g7|s^h9hfk#-?`- zP9YnY`^v3WA+J~ zwnG#`_gv@Af=?p9$l0r(wzLK>UTi7zKmmv8ket4?7SDShH$5tQ3XxfRC3+Dt^zE9+ zb8f;byOmQNqNu1MH!%Lkt+7||7d`zm#J<#eB>JIF_{II_WQ~*0v@5+xHGv~6pjY>A z0P2!g&Dw;2*)Wk>AcNBc;L`^&35YnkG`-sjk|~xwwmhqQk|*`0!$HlIT6%225I<1prt{&94Xt~M{p9KJPsK8b%O zKdYpPaAomY0kwuq*4X~GW^3Ht2c(;C1DocmZ!k`eo~o-}e{aT9NuQ?aVvq5jS^s9S z;3_6W*F4?-HcMA>1lAVKf|aqh<5eRTCyGs8F|`l>}dv0gnMU_Rcb0PQD6yS0xuX~(&Oy|Hg@NxnOoI~ija2Sp#Mjb zf9b7Kc}Qk|h-46a0+@u)KQ}kKH&l6sB4&>rG#*)Tt!YJ7{r0N{Fy%FxCtStb)K;8^ zt4NyUi-uQt$8+-7Pd@s)(TcqSE8; z@BYisz64lkU9&#qU3~A}h;DFW83S@B=eJWdwsGN(O;0F^)w15s%Ce@J4kM`Qn`A KCs<1fN&h#cz;@aI literal 0 HcmV?d00001 diff --git a/img/orcid_16x16.png b/img/orcid_16x16.png new file mode 100644 index 0000000000000000000000000000000000000000..7a21d4187de4de4f33fec61ec80c7d9f4bfab036 GIT binary patch literal 421 zcmeAS@N?(olHy`uVBq!ia0vp^0wB!73?$#)eFPFzxI9nf5J(e?3%gL`gefBE_B-rKDizYo3zYG5k~@(X52*spN@{Wtj-ppguWN#5=* z>|%)>uYnx)5>H=O_Gg@I94uBH2bJakg_1p8978y+CnqEbczFk>FfdIvQ7A}DNZ?yN zW7@Q7SqBbGVoO?LY0$Cs?3`)abaW>=>aIOhZ^h#sbj-@5FNq45?T3J71m;4g^}KTH+c}l9E`GYL#4+3Zxi} z42+C)4J>sH%t8z-tPD)7OpUb-46F$#VwLR4Gf;HelF{r G5}E)Knv8D% literal 0 HcmV?d00001 diff --git a/introduction/introduction.tex b/introduction/introduction.tex index 9440d43..9a0282f 100644 --- a/introduction/introduction.tex +++ b/introduction/introduction.tex @@ -5,6 +5,12 @@ \pagenumbering{arabic} }{} \chapter{Introduction}% +%\setlength{\epigraphwidth}{.5\textwidth}% +%\epigraphhead[30]{ +% A \textbf{rhapsody} in music is a one-movement work that is episodic yet integrated, free-flowing in structure, featuring a range of highly contrasted moods, colour, and tonality. An air of spontaneous inspiration and a sense of improvisation make it freer in form than a set of variations. 
+%}{% +% Wikipedia~\cite{wikipedia_contributors_rhapsody_2022} +%}% \label{chp:introduction} The sheer number of connected devices around us is increasing exponentially. First and foremost, these devices are driven by software. @@ -14,7 +20,7 @@ This thesis is about \ldots \section{Internet of Things} \todo[inline]{add more citations and rewrite to make modern} The \gls{IOT} is growing rapidly and it is changing the way people and machines interact with the world. -While the term \gls{IOT} briefly gained interest around 1999 to describe the communication of \gls{RFID} devices~\cite{}, it probably already popped up halfway the eigthies in a speech by Peter T. Lewis~\cite{peter_t_lewis_speech_1985}. +While the term \gls{IOT} briefly gained interest around 1999 to describe the communication of \gls{RFID} devices~\todo{cite}, it probably already popped up halfway the eigthies in a speech by Peter T. Lewis~\cite{peter_t_lewis_speech_1985}. \begin{quote} The \acrlong{IOT}, or \acrshort{IOT}, is the integration of people, processes and technology with connectable devices and sensors to enable remote monitoring, status, manipulation and evaluation of trends of such devices. @@ -155,20 +161,146 @@ This approach to software development is also called \gls{TOSD}~\cite{wang_maint \todo{dit moet beter} \end{description} -\section{Thesis outline} +\section{Outline} +%\epigraph{% +% \textbf{rhapsody} /\textipa{['r\ae{}ps@di]}/ \emph{noun} (pl.\ \textbf{-ies}) a piece of music that is full of feeling and is not regular in form: Liszt's Hungarian Rhapsodies. +%}{% +% Oxford Advanced Learners Dictionary~\cite{margaret_deuter_rhapsody_2015}. +%} +Wikipedia defines a \emph{rhapsody} as follows~\cite{wikipedia_contributors_rhapsody_2022}: +\begin{quote} + A \textbf{rhapsody} in music is a one-movement work that is episodic yet integrated, free-flowing in structure, featuring a range of highly contrasted moods, colour, and tonality. 
An air of spontaneous inspiration and a sense of improvisation make it freer in form than a set of variations. +\end{quote} + +This thesis follows the tradition and consists of three movements that are episodic yet integrated. +The first movement is about embedded \gls{DSL} techniques, the second movement elaborates on \gls{TOP} for the \gls{IOT} and the third and last movement compares traditional tiered \gls{IOT} architectures to tierless architectures such as \gls{TOP}. +The movements are readable independently if the reader is familiar with the background material provided in \cref{chp:introduction}. + +\paragraph{\Cref{prt:dsl}} is a cumulative or paper-based movement that focusses on techniques for embedding \glspl{DSL} in functional programming languages. +After reading the first chapter, subsequent chapters in this movement are readable independently. + +\subparagraph{\Cref{chp:dsl_embedding_techniques}} shows all the basic techniques and compares the properties. +This chapter is not based on a paper and is written as background for the subsequent chapters in the movement. + +\subparagraph{\Cref{chp:classy_deep_embedding}} +This chapter is based on the paper: \emph{Deep Embedding with Class}~\todo[inline]{cite when published}. +During a Master's thesis supervision~\cite{amazonas_cabral_de_andrade_developing_2018}, focussing on an early version of \gls{MTASK}, a seed was planted for a novel deep embedding technique for \glspl{DSL} where the resulting language is extendible both in constructs and in interpretation using type classes and existential data types. +Slowly the ideas organically grew to form the technique shown in the paper. + +The research from this paper and writing the paper was solely performed by me. + +\subparagraph{\Cref{chp:first-class_datatypes}} shows how to inherit data types in embedded \glspl{DSL} using metaprogramming.
+This chapter is based on the paper: \emph{First-Class Data Types in Shallow Embedded Domain-Specific Languages using Metaprogramming}~\todo[inline]{cite when accepted}. + +The research in this paper and writing the paper was performed by me, though there were weekly meetings with Pieter Koopman and Rinus Plasmeijer in which we discussed and refined the ideas. + +\subparagraph{\Cref{chp:strongly-typed_multi-view_stack-based_computations}} shows how to use advanced \gls{DSL} techniques to embed type-safe stack-based computations in a host language. +This chapter is based on the paper: \emph{Strongly-Typed Multi-View Stack-Based Computations}~\todo[inline]{cite when accepted}. + +I supported Pieter Koopman in performing the research in this paper. +The paper was mostly written by Pieter Koopman\todo{probably}. + +\paragraph{\Cref{prt:top}} is a monograph focussing on \gls{TOP} for the \gls{IOT}. +Therefore, the chapters depend on each other and are best read in order. +The monograph is compiled from the following papers and revised lecture notes. + +\begin{itemize} + \item \bibentry{koopman_task-based_2018}. + \item \bibentry{lubbers_task_2017} (extension of Master's thesis~\cite{lubbers_task_2018}). + \item \bibentry{lubbers_multitasking_2019}. + \item \emph{Simulation of a Task-Based Embedded Domain Specific Language for the Internet of Things}~\todo[inline]{cite when published} + \item \emph{Writing Internet of Things applications with Task Oriented Programming}~\todo[inline]{cite when published} + \item \bibentry{lubbers_interpreting_2019}. + \item \emph{Reducing the Power Consumption of IoT with Task-Oriented Programming}~\todo[inline]{cite when published} (extension of Master's thesis by Sjoerd Crooijmans~\cite{crooijmans_reducing_2021}).
+ \item \emph{Asynchronous Shared Data Sources}~\todo[inline]{cite when accepted} + \item \emph{Green Computing for the Internet of Things}~\todo[inline]{cite when done} + \item \emph{T.B.A.}~\todo[inline]{cite when done} +\end{itemize} + +\paragraph{\Cref{prt:tvt}} +This chapter focusses on comparing traditional tiered architectures to tierless architectures and is based on a single journal paper that extended on a conference paper. +It does both a qualitative and a quantitative four-way comparison of a smart campus application. + +The research in these papers and writing them was performed by all authors. +I created the server application, the \gls{CLEAN}/\gls{ITASK}/\gls{MTASK} implementation (\acrshort{CWS}) and the \gls{CLEAN}/\gls{ITASK} implementation (\acrshort{CRS}). +Adrian Ramsingh created the micropython implementation (\acrshort{PWS}); the original python implementation (\acrshort{PRS}) and the server application were created by Jeremy Singer, Dejice Jacob and Kristian Hentschel~\cite{hentschel_supersensors:_2016}. + +\begin{itemize} + \item \bibentry{lubbers_tiered_2020}. + \item \emph{Could Tierless Languages Reduce IoT Development Grief?}~\todo[inline]{cite when accepted} +\end{itemize} + +\paragraph{\Cref{chp:conclusion}} finally concludes and provides an outlook on future work. + +\newcounter{secondauthorcnt} +\newcounter{underreviewcnt} +\newcounter{plannedcnt} +\newcounter{publicationscnt} +\newcommand{\secondauthor}{\textsuperscript{$\ast$}\stepcounter{secondauthorcnt}} +\newcommand{\underreview}{\textsuperscript{$\dagger$}\stepcounter{underreviewcnt}} +\newcommand{\planned}{\textsuperscript{$\ddagger$}\stepcounter{plannedcnt}} + +\begin{enumerate} + \item \secondauthor{} + A Task-Based DSL for Microcomputers + + P. Koopman\footnote{\orcid{0000-0002-3688-0957}}, M. Lubbers\footnote{\orcid{0000-0002-4015-4878}}, and R. Plasmeijer (RWDSL 2018)~\cite{koopman_task-based_2018}. + \item Task Oriented Programming and the Internet of Things + + M.
Lubbers, P. Koopman, and R. Plasmeijer (IFL 2018)~\cite{lubbers_task_2017} (extension of Master's thesis~\cite{lubbers_task_2018}). + \item Multitasking on Microcontrollers using Task Oriented Programming + + M. Lubbers, P. Koopman, and R. Plasmeijer (MIPRO/4COWS 2019)~\cite{lubbers_multitasking_2019}. + \item \secondauthor{} + Simulation of a Task-Based Embedded Domain Specific Language for the Internet of Things + + P. Koopman, M. Lubbers, and R. Plasmeijer (CEFP/3COWS 2018). + \item Writing Internet of Things applications with Task Oriented Programming + + M. Lubbers, P. Koopman, and R. Plasmeijer (CEFP/3COWS 2019). + \item Interpreting Task Oriented Programs on Tiny Computers + + M. Lubbers, P. Koopman, and R. Plasmeijer (IFL 2019)~\cite{lubbers_interpreting_2019}. + \item Tiered versus Tierless IoT Stacks: Comparing Smart Campus Software Architectures + + M. Lubbers, P. Koopman, A. Ramsingh\footnote{\orcid{0000-0003-3501-902X}}, J. Singer\footnote{\orcid{0000-0001-9462-6802}}, and P. Trinder\footnote{\orcid{0000-0003-0190-7010}} (IoT 2020)~\cite{lubbers_tiered_2020}. + + % in press + \item Deep Embedding with Class + + M. Lubbers (TFP 2022). + \item \secondauthor{} + Reducing the Power Consumption of IoT with Task-Oriented Programming + + S. Crooijmans, M. Lubbers, and P. Koopman (TFP 2022). + + %Under review + \item \underreview{} Could Tierless Languages Reduce IoT Development Grief? + + M. Lubbers, P. Koopman, A. Ramsingh, J. Singer, and P. Trinder (ACM TIOT) (extension of~\cite{lubbers_tiered_2020}). + \item \secondauthor\underreview{} + Strongly-Typed Multi-View Stack-Based Computations + + M. Lubbers and P. Koopman (IFL 2022 under review). + \item \underreview{} First-Class Data Types in Shallow Embedded Domain-Specific Languages using Metaprogramming + + M. Lubbers, P. Koopman, and R. Plasmeijer (IFL 2022 under review). 
+ \item \underreview{} Asynchronous Shared Data Sources -\todo[inline]{reference correct chapters} -Chapters 0 and 0 are based on the paper \emph{A Task-Based \acrshort{DSL} for Microcomputers}~\cite{koopman_task-based_2018}. + M. Lubbers, H. Böhm, P. Koopman, and R. Plasmeijer (IFL 2022 under review). -Chapters 0 and 0 are based on the Master's thesis and paper \emph{\glst{TOP} and the \glst{IOT}}~\cite{lubbers_task_2017,lubbers_task_2018}. + %Planned + \item \planned{} Green Computing for the Internet of Things -Chapters 0 and 0 are based on the paper \emph{Multitasking on Microcontrollers using \glst{TOP}}~\cite{lubbers_multitasking_2019}. + M. Lubbers, P. Koopman (Sustrainable 2022 planned). + \item \planned{} T.B.A. -Chapters 0 and 0 are based on the paper \emph{Tiered versus Tierless \acrshort{IOT} Stacks: Comparing Smart Campus Software Architectures}~\cite{lubbers_tiered_2020}. + M. Lubbers, P. Koopman (Sustrainable 2023 planned). -Chapters 0 and 0 are based on the paper \emph{Interpreting \glst{TOP} Programs on Tiny Computers}~\cite{lubbers_interpreting_2019}. + \setcounter{publicationscnt}{\value{enumi}} +\end{enumerate} -Chapters 0 and 0 are based on the paper \emph{Writing \glst{IOT} applications with \glst{TOP}}~\cite{lubbers_writing_2019}. +There are \thepublicationscnt{} publications of which \thesecondauthorcnt{} I'm second author, \theunderreviewcnt{} are under review and \theplannedcnt{} are planned. 
\input{subfilepostamble} \end{document} diff --git a/latexmkrc b/latexmkrc index c6aba37..48cdc14 100644 --- a/latexmkrc +++ b/latexmkrc @@ -42,3 +42,5 @@ $clean_ext .= ' %R.ist %R.xdy'; $show_time = 1; $pdf_mode = 1; + +$output_directory = "aux"; diff --git a/lstlanghaskelllhstex.sty b/lstlanghaskelllhstex.sty new file mode 100644 index 0000000..0a51849 --- /dev/null +++ b/lstlanghaskelllhstex.sty @@ -0,0 +1,155 @@ +\lstdefinestyle{haskelllhstex}{% + language=Haskell, + deletekeywords={% + True,False,% + Bool,Int,Integer,Float,Double,String,% + Maybe,Nothing,Just,% + zip,length,Show,show,Num,Eq,print,% + id}, + morekeywords={forall}, + literate=% + {\_}{{\raisebox{.15ex}{\_}}}1 + {~}{{\raisebox{-.6ex}{\textasciitilde}}}1 + {\\}{{$\lambda\:$}}1 + {->}{{$\shortrightarrow$}}2 + {<-}{{$\shortleftarrow$}}2 + {=>}{{$\Rightarrow$}}2 + {<=}{{$\Leftarrow$}}2 + {'}{{`}}1 + {...}{{$\cdots$}}1 %chktex 11 + {\_}{{\raisebox{.15ex}{\_}}}1 + {~}{{\raisebox{-.6ex}{\textasciitilde}}}1 + {\\}{{$\lambda\:$}}1 + {->}{{$\shortrightarrow$}}2 + {<-}{{$\shortleftarrow$}}2 + {=>}{{$\Rightarrow$}}2 + {<=}{{$\Leftarrow$}}2 + {'}{{`}}1 + {...}{{$\cdots$}}1 %chktex 11 + {e1}{{e\textsubscript{1}}}2 + {e2}{{e\textsubscript{2}}}2 + {e3}{{e\textsubscript{3}}}2 + {e1p}{{e\textsubscript{1}\textsuperscript{$\prime$}}}2 + {e2p}{{e\textsubscript{2}\textsuperscript{$\prime$}}}2 + {eval_0}{{eval\textsubscript{0}}}5 + {Eval_0}{{Eval\textsubscript{0}}}5 + {Expr_0}{{Expr\textsubscript{0}}}5 + {Lit_0}{{Lit\textsubscript{0}}}4 + {Add_0}{{Add\textsubscript{0}}}4 + {Sub_0}{{Sub\textsubscript{0}}}4 + {Print_0}{{Print\textsubscript{0}}}6 + {print_0}{{print\textsubscript{0}}}6 + {Sem_s}{{Sem\textsubscript{s}}}4 + {lit_s}{{lit\textsubscript{s}}}4 + {add_s}{{add\textsubscript{s}}}4 + {sub_s}{{sub\textsubscript{s}}}4 + {Expr_t}{{Expr\textsubscript{t}}}5 + {Eval_t}{{Eval\textsubscript{t}}}5 + {E_t}{{E\textsubscript{t}}}2 + {Printer_t}{{Printer\textsubscript{t}}}8 + {P_t}{{P\textsubscript{t}}}2 + 
{lit_t}{{lit\textsubscript{t}}}4 + {add_t}{{add\textsubscript{t}}}4 + {Sub_t}{{Sub\textsubscript{t}}}4 + {sub_t}{{sub\textsubscript{t}}}4 + {eval_1}{{eval\textsubscript{1}}}5 + {Eval_1}{{Eval\textsubscript{1}}}5 + {Expr_1}{{Expr\textsubscript{1}}}5 + {Lit_1}{{Lit\textsubscript{1}}}4 + {Add_1}{{Add\textsubscript{1}}}4 + {Sub_1}{{Sub\textsubscript{1}}}4 + {Print_1}{{Print\textsubscript{1}}}6 + {print_1}{{print\textsubscript{1}}}6 + {eval_2}{{eval\textsubscript{2}}}5 + {Eval_2}{{Eval\textsubscript{2}}}5 + {Expr_2}{{Expr\textsubscript{2}}}5 + {Lit_2}{{Lit\textsubscript{2}}}4 + {Add_2}{{Add\textsubscript{2}}}4 + {Sub_2}{{Sub\textsubscript{2}}}4 + {sub_2}{{sub\textsubscript{2}}}4 + {Ext_2}{{Ext\textsubscript{2}}}4 + {Print_2}{{Print\textsubscript{2}}}6 + {print_2}{{print\textsubscript{2}}}6 + {Semantics_2}{{Semantics\textsubscript{2}}}{10} + {eval_3}{{eval\textsubscript{3}}}5 + {Eval_3}{{Eval\textsubscript{3}}}5 + {Print_3}{{Print\textsubscript{3}}}6 + {print_3}{{print\textsubscript{3}}}6 + {opt_3}{{opt\textsubscript{3}}}4 + {Opt_3}{{Opt\textsubscript{3}}}4 + {HasEval_3}{{HasEval\textsubscript{3}}}8 + {getEval_3}{{getEval\textsubscript{3}}}8 + {EvalDict_3}{{EvalDict\textsubscript{3}}}9 + {HasPrint_3}{{HasPrint\textsubscript{3}}}9 + {getPrint_3}{{getPrint\textsubscript{3}}}9 + {PrintDict_3}{{PrintDict\textsubscript{3}}}{10} + {HasOpt_3}{{HasOpt\textsubscript{3}}}7 + {getOpt_3}{{getOpt\textsubscript{3}}}7 + {OptDict_3}{{OptDict\textsubscript{3}}}8 + {Expr_3}{{Expr\textsubscript{3}}}5 + {Lit_3}{{Lit\textsubscript{3}}}4 + {Add_3}{{Add\textsubscript{3}}}4 + {Sub_3}{{Sub\textsubscript{3}}}4 + {sub_3}{{sub\textsubscript{3}}}4 + {Ext_3}{{Ext\textsubscript{3}}}4 + {eval_4}{{eval\textsubscript{4}}}5 + {Eval_4}{{Eval\textsubscript{4}}}5 + {Print_4}{{Print\textsubscript{4}}}5 + {print_4}{{print\textsubscript{4}}}5 + {opt_4}{{opt\textsubscript{4}}}4 + {Opt_4}{{Opt\textsubscript{4}}}4 + {HasEval_4}{{HasEval\textsubscript{4}}}8 + {getEval_4}{{getEval\textsubscript{4}}}8 + 
{EvalDict_4}{{EvalDict\textsubscript{4}}}9 + {HasPrint_4}{{HasPrint\textsubscript{4}}}9 + {getPrint_4}{{getPrint\textsubscript{4}}}9 + {PrintDict_4}{{PrintDict\textsubscript{4}}}{10} + {HasOpt_4}{{HasOpt\textsubscript{4}}}7 + {getOpt_4}{{getOpt\textsubscript{4}}}7 + {OptDict_4}{{OptDict\textsubscript{4}}}8 + {OptPrintDict_4}{{OptPrintDict\textsubscript{4}}}{14} + {OPD_4}{{OPD\textsubscript{4}}}4 + {Expr_4}{{Expr\textsubscript{4}}}5 + {Lit_4}{{Lit\textsubscript{4}}}4 + {Add_4}{{Add\textsubscript{4}}}4 + {Sub_4}{{Sub\textsubscript{4}}}4 + {SubLoop_4}{{SubLoop\textsubscript{4}}}8 + {sub_4}{{sub\textsubscript{4}}}4 + {Neg_4}{{Neg\textsubscript{4}}}4 + {NegLoop_4}{{NegLoop\textsubscript{4}}}8 + {neg_4}{{neg\textsubscript{4}}}4 + {Ext_4}{{Ext\textsubscript{4}}}4 + {eval_g}{{eval\textsubscript{g}}}5 + {Eval_g}{{Eval\textsubscript{g}}}5 + {Print_g}{{Print\textsubscript{g}}}6 + {print_g}{{print\textsubscript{g}}}6 + {opt_g}{{opt\textsubscript{g}}}4 + {Opt_g}{{Opt\textsubscript{g}}}4 + {HasEval_g}{{HasEval\textsubscript{g}}}8 + {getEval_g}{{getEval\textsubscript{g}}}8 + {EvalDict_g}{{EvalDict\textsubscript{g}}}9 + {HasPrint_g}{{HasPrint\textsubscript{g}}}9 + {getPrint_g}{{getPrint\textsubscript{g}}}9 + {PrintDict_g}{{PrintDict\textsubscript{g}}}{10} + {HasOpt_g}{{HasOpt\textsubscript{g}}}7 + {getOpt_g}{{getOpt\textsubscript{g}}}7 + {OptDict_g}{{OptDict\textsubscript{g}}}8 + {OptPrintDict_g}{{OptPrintDict\textsubscript{g}}}{14} + {OPD_g}{{OPD\textsubscript{g}}}4 + {Expr_g}{{Expr\textsubscript{g}}}5 + {Lit_g}{{Lit\textsubscript{g}}}4 + {Add_g}{{Add\textsubscript{g}}}4 + {Sub_g}{{Sub\textsubscript{g}}}4 + {SubLoop_g}{{SubLoop\textsubscript{g}}}8 + {sub_g}{{sub\textsubscript{g}}}4 + {Neg_g}{{Neg\textsubscript{g}}}4 + {NegLoop_g}{{NegLoop\textsubscript{g}}}8 + {neg_g}{{neg\textsubscript{g}}}4 + {NotLoop_g}{{NotLoop\textsubscript{g}}}8 + {Not_g}{{Not\textsubscript{g}}}4 + {not_g}{{not\textsubscript{g}}}4 + {EqLoop_g}{{EqLoop\textsubscript{g}}}7 + {Eq_g}{{Eq\textsubscript{g}}}3 + 
{eq_g}{{eq\textsubscript{g}}}3 + {Ext_g}{{Ext\textsubscript{g}}}4 +} diff --git a/other.bib b/other.bib new file mode 100644 index 0000000..7ef32eb --- /dev/null +++ b/other.bib @@ -0,0 +1,840 @@ + +@inproceedings{plasmeijer_task-oriented_2012, + address = {New York, NY, USA}, + series = {{PPDP} '12}, + title = {Task-{Oriented} {Programming} in a {Pure} {Functional} {Language}}, + isbn = {978-1-4503-1522-7}, + url = {https://doi.org/10.1145/2370776.2370801}, + doi = {10.1145/2370776.2370801}, + abstract = {Task-Oriented Programming (TOP) is a novel programming paradigm for the construction of distributed systems where users work together on the internet. When multiple users collaborate, they need to interact with each other frequently. TOP supports the definition of tasks that react to the progress made by others. With TOP, complex multi-user interactions can be programmed in a declarative style just by defining the tasks that have to be accomplished, thus eliminating the need to worry about the implementation detail that commonly frustrates the development of applications for this domain. TOP builds on four core concepts: tasks that represent computations or work to do which have an observable value that may change over time, data sharing enabling tasks to observe each other while the work is in progress, generic type driven generation of user interaction, and special combinators for sequential and parallel task composition. The semantics of these core concepts is defined in this paper. 
As an example we present the iTask3 framework, which embeds TOP in the functional programming language Clean.}, + booktitle = {Proceedings of the 14th {Symposium} on {Principles} and {Practice} of {Declarative} {Programming}}, + publisher = {Association for Computing Machinery}, + author = {Plasmeijer, Rinus and Lijnse, Bas and Michels, Steffen and Achten, Peter and Koopman, Pieter}, + year = {2012}, + note = {event-place: Leuven, Belgium}, + keywords = {clean, task-oriented programming}, + pages = {195--206}, + file = {103802.pdf:/home/mrl/.local/share/zotero/storage/ZE6A65AW/103802.pdf:application/pdf}, +} + +@inproceedings{brus_clean_1987, + address = {Berlin, Heidelberg}, + title = {Clean — {A} language for functional graph rewriting}, + isbn = {978-3-540-47879-9}, + abstract = {Clean is an experimental language for specifying functional computations in terms of graph rewriting. It is based on an extension of Term Rewriting Systems (TRS) in which the terms are replaced by graphs. Such a Graph Rewriting System (GRS) consists of a, possibly cyclic, directed graph, called the data graph and graph rewrite rules which specify how this data graph may be rewritten. Clean is designed to provide a firm base for functional programming. In particular, Clean is suitable as an intermediate language between functional languages and (parallel) target machine architectures. A sequential implementation of Clean on a conventional machine is described and its performance is compared with other systems. The results show that Clean can be efficiently implemented.}, + booktitle = {Functional {Programming} {Languages} and {Computer} {Architecture}}, + publisher = {Springer Berlin Heidelberg}, + author = {Brus, T. H. and van Eekelen, M. C. J. D. and van Leer, M. O. and Plasmeijer, M. 
J.}, + editor = {Kahn, Gilles}, + year = {1987}, + pages = {364--384}, + file = {brut87-Clean.ps.gz:/home/mrl/.local/share/zotero/storage/T2QATWIE/brut87-Clean.ps.gz:application/gzip}, +} + +@misc{bolingbroke_constraint_2011, + title = {Constraint {Kinds} for {GHC}}, + url = {http://blog.omega-prime.co.uk/2011/09/10/constraint-kinds-for-ghc/}, + urldate = {2021-06-09}, + journal = {:: (Bloggable a) ={\textgreater} a -{\textgreater} IO ()}, + author = {Bolingbroke, Max}, + month = sep, + year = {2011}, + file = {Constraint Kinds for GHC:/home/mrl/.local/share/zotero/storage/R6RL79K7/constraint-kinds-for-ghc.html:text/html}, +} + +@inproceedings{fegaras_revisiting_1996, + address = {New York, NY, USA}, + series = {{POPL} '96}, + title = {Revisiting {Catamorphisms} over {Datatypes} with {Embedded} {Functions} (or, {Programs} from {Outer} {Space})}, + isbn = {0-89791-769-3}, + url = {https://doi.org/10.1145/237721.237792}, + doi = {10.1145/237721.237792}, + abstract = {We revisit the work of Paterson and of Meijer \& Hutton, which describes how to construct catamorphisms for recursive datatype definitions that embed contravariant occurrences of the type being defined. Their construction requires, for each catamorphism, the definition of an anamorphism that has an inverse-like relationship to that catamorphism. We present an alternative construction, which replaces the stringent requirement that an inverse anamorphism be defined for each catamorphism with a more lenient restriction. The resulting construction has a more efficient implementation than that of Paterson, Meijer, and Hutton and the relevant restriction can be enforced by a Hindley-Milner type inference algorithm. 
We provide numerous examples illustrating our method.}, + booktitle = {Proceedings of the 23rd {ACM} {SIGPLAN}-{SIGACT} {Symposium} on {Principles} of {Programming} {Languages}}, + publisher = {Association for Computing Machinery}, + author = {Fegaras, Leonidas and Sheard, Tim}, + year = {1996}, + note = {event-place: St. Petersburg Beach, Florida, USA}, + pages = {284--294}, + file = {Fegaras and Sheard - 1996 - Revisiting Catamorphisms over Datatypes with Embed.pdf:/home/mrl/.local/share/zotero/storage/WCSRVWGC/Fegaras and Sheard - 1996 - Revisiting Catamorphisms over Datatypes with Embed.pdf:application/pdf}, +} + +@inproceedings{pfenning_higher-order_1988, + address = {New York, NY, USA}, + series = {{PLDI} '88}, + title = {Higher-{Order} {Abstract} {Syntax}}, + isbn = {0-89791-269-1}, + url = {https://doi.org/10.1145/53990.54010}, + doi = {10.1145/53990.54010}, + abstract = {We describe motivation, design, use, and implementation of higher-order abstract syntax as a central representation for programs, formulas, rules, and other syntactic objects in program manipulation and other formal systems where matching and substitution or unification are central operations. Higher-order abstract syntax incorporates name binding information in a uniform and language generic way. Thus it acts as a powerful link integrating diverse tools in such formal environments. We have implemented higher-order abstract syntax, a supporting matching and unification algorithm, and some clients in Common Lisp in the framework of the Ergo project at Carnegie Mellon University.}, + booktitle = {Proceedings of the {ACM} {SIGPLAN} 1988 {Conference} on {Programming} {Language} {Design} and {Implementation}}, + publisher = {Association for Computing Machinery}, + author = {Pfenning, F. 
and Elliott, C.}, + year = {1988}, + note = {event-place: Atlanta, Georgia, USA}, + pages = {199--208}, + file = {Pfenning and Elliott - 1988 - Higher-Order Abstract Syntax.pdf:/home/mrl/.local/share/zotero/storage/2HSRWURK/Pfenning and Elliott - 1988 - Higher-Order Abstract Syntax.pdf:application/pdf}, +} + +@inproceedings{chlipala_parametric_2008, + address = {New York, NY, USA}, + series = {{ICFP} '08}, + title = {Parametric {Higher}-{Order} {Abstract} {Syntax} for {Mechanized} {Semantics}}, + isbn = {978-1-59593-919-7}, + url = {https://doi.org/10.1145/1411204.1411226}, + doi = {10.1145/1411204.1411226}, + abstract = {We present parametric higher-order abstract syntax (PHOAS), a new approach to formalizing the syntax of programming languages in computer proof assistants based on type theory. Like higher-order abstract syntax (HOAS), PHOAS uses the meta language's binding constructs to represent the object language's binding constructs. Unlike HOAS, PHOAS types are definable in general-purpose type theories that support traditional functional programming, like Coq's Calculus of Inductive Constructions. We walk through how Coq can be used to develop certified, executable program transformations over several statically-typed functional programming languages formalized with PHOAS; that is, each transformation has a machine-checked proof of type preservation and semantic preservation. Our examples include CPS translation and closure conversion for simply-typed lambda calculus, CPS translation for System F, and translation from a language with ML-style pattern matching to a simpler language with no variable-arity binding constructs. 
By avoiding the syntactic hassle associated with first-order representation techniques, we achieve a very high degree of proof automation.}, + booktitle = {Proceedings of the 13th {ACM} {SIGPLAN} {International} {Conference} on {Functional} {Programming}}, + publisher = {Association for Computing Machinery}, + author = {Chlipala, Adam}, + year = {2008}, + note = {event-place: Victoria, BC, Canada}, + keywords = {compiler verification, dependent types, interactive proof assistants, type-theoretic semantics}, + pages = {143--156}, + file = {Chlipala - 2008 - Parametric Higher-Order Abstract Syntax for Mechan.pdf:/home/mrl/.local/share/zotero/storage/DZ33DAMU/Chlipala - 2008 - Parametric Higher-Order Abstract Syntax for Mechan.pdf:application/pdf}, +} + +@incollection{reynolds_user-defined_1978, + address = {New York, NY}, + title = {User-{Defined} {Types} and {Procedural} {Data} {Structures} as {Complementary} {Approaches} to {Data} {Abstraction}}, + isbn = {978-1-4612-6315-9}, + url = {https://doi.org/10.1007/978-1-4612-6315-9_22}, + abstract = {User-defined types (or modes) and procedural (or functional) data structures are complementary methods for data abstraction, each providing a capability lacked by the other. With user-defined types, all information about the representation of a particular kind of data is centralized in a type definition and hidden from the rest of the program. With procedural data structures, each part of the program which creates data can specify its own representation, independently of any representations used elsewhere for the same kind of data. However, this decentralization of the description of data is achieved at the cost of prohibiting primitive operations from accessing the representations of more than one data item. 
The contrast between these approaches is illustrated by a simple example.}, + booktitle = {Programming {Methodology}: {A} {Collection} of {Articles} by {Members} of {IFIP} {WG2}.3}, + publisher = {Springer New York}, + author = {Reynolds, John C.}, + editor = {Gries, David}, + year = {1978}, + doi = {10.1007/978-1-4612-6315-9_22}, + pages = {309--317}, + file = {Reynolds - 1978 - User-Defined Types and Procedural Data Structures .pdf:/home/mrl/.local/share/zotero/storage/ASXE73U2/Reynolds - 1978 - User-Defined Types and Procedural Data Structures .pdf:application/pdf}, +} + +@misc{ghc_team_ghc_2021, + title = {{GHC} {User}’s {Guide} {Documentation}}, + url = {https://downloads.haskell.org/~ghc/latest/docs/users_guide.pdf}, + language = {English}, + urldate = {2021-02-24}, + publisher = {Release}, + author = {GHC Team}, + year = {2021}, + file = {GHC Team - 2021 - GHC User’s Guide Documentation.pdf:/home/mrl/.local/share/zotero/storage/87ZT5VXL/GHC Team - 2021 - GHC User’s Guide Documentation.pdf:application/pdf}, +} + +@misc{ghc_team_datadynamic_2021, + title = {Data.{Dynamic}}, + url = {https://hackage.haskell.org/package/base-4.14.1.0/docs/Data-Dynamic.html}, + language = {English}, + urldate = {2021-02-24}, + publisher = {Release}, + author = {GHC Team}, + year = {2021}, +} + +@inproceedings{jeuring_polytypic_1996, + address = {Berlin, Heidelberg}, + title = {Polytypic programming}, + isbn = {978-3-540-70639-7}, + abstract = {Many functions have to be written over and over again for different datatypes, either because datatypes change during the development of programs, or because functions with similar functionality are needed on different datatypes. Examples of such functions are pretty printers, debuggers, equality functions, unifiers, pattern matchers, rewriting functions, etc. Such functions are called polytypic functions. A polytypic function is a function that is defined by induction on the structure of user-defined datatypes. 
This paper introduces polytypic functions, and shows how to construct and reason about polytypic functions. A larger example is studied in detail: polytypic functions for term rewriting and for determining whether a collection of rewrite rules is normalising.}, + booktitle = {Advanced {Functional} {Programming}}, + publisher = {Springer Berlin Heidelberg}, + author = {Jeuring, Johan and Jansson, Patrik}, + editor = {Launchbury, John and Meijer, Erik and Sheard, Tim}, + year = {1996}, + pages = {68--114}, + file = {Jeuring and Jansson - 1996 - Polytypic programming.pdf:/home/mrl/.local/share/zotero/storage/SLC4G2IT/Jeuring and Jansson - 1996 - Polytypic programming.pdf:application/pdf}, +} + +@book{peyton_jones_haskell_2003, + address = {Cambridge}, + title = {Haskell 98 language and libraries: the revised report}, + isbn = {0-521 826144}, + publisher = {Cambridge University Press}, + editor = {Peyton Jones, Simon}, + year = {2003}, + file = {Peyton Jones - 2003 - Haskell 98 language and libraries the revised rep.pdf:/home/mrl/.local/share/zotero/storage/UXEJT89I/Peyton Jones - 2003 - Haskell 98 language and libraries the revised rep.pdf:application/pdf}, +} + +@inproceedings{laufer_combining_1994, + title = {Combining type classes and existential types}, + booktitle = {Proceedings of the {Latin} {American} {Informatic} {Conference} ({PANEL})}, + publisher = {ITESM-CEM}, + author = {Läufer, Konstantin}, + year = {1994}, + note = {event-place: Monterrey, Mexico}, + file = {Läufer - COMBINING TYPE CLASSES AND EXISTENTIAL TYPES.pdf:/home/mrl/.local/share/zotero/storage/KR4P9EHS/Läufer - COMBINING TYPE CLASSES AND EXISTENTIAL TYPES.pdf:application/pdf}, +} + +@techreport{hughes_restricted_1999, + address = {Paris}, + title = {Restricted data types in {Haskell}}, + number = {UU-CS-1999-28}, + institution = {Department of Information and Computing Sciences, Utrecht University}, + author = {Hughes, John}, + year = {1999}, + pages = {16}, + file = {Hughes - 1999 - 
Restricted data types in Haskell.pdf:/home/mrl/.local/share/zotero/storage/7ZE2MYWE/Hughes - 1999 - Restricted data types in Haskell.pdf:application/pdf}, +} + +@article{najd_trees_2017, + title = {Trees that {Grow}}, + volume = {23}, + abstract = {We study the notion of extensibility in functional data types, as a new approach to the problem of decorating abstract syntax trees with additional information. We observed the need for such extensibility while redesigning the data types representing Haskell abstract syntax inside Glasgow Haskell Compiler (GHC). Specifically, we describe a programming idiom that exploits type-level functions to allow a particular form of extensibility. The approach scales to support existentials and generalised algebraic data types, and we can use pattern synonyms to make it convenient in practice.}, + number = {1}, + journal = {Journal of Universal Computer Science}, + author = {Najd, Shayan and Peyton Jones, Simon}, + month = jan, + year = {2017}, + pages = {42--62}, + file = {Najd and Jones - 2017 - Trees that Grow.pdf:/home/mrl/.local/share/zotero/storage/HYQFTWZP/Najd and Jones - 2017 - Trees that Grow.pdf:application/pdf}, +} + +@inproceedings{loh_open_2006, + address = {New York, NY, USA}, + series = {{PPDP} '06}, + title = {Open {Data} {Types} and {Open} {Functions}}, + isbn = {1-59593-388-3}, + url = {https://doi.org/10.1145/1140335.1140352}, + doi = {10.1145/1140335.1140352}, + abstract = {The problem of supporting the modular extensibility of both data and functions in one programming language at the same time is known as the expression problem. Functional languages traditionally make it easy to add new functions, but extending data (adding new data constructors) requires modifying existing code. We present a semantically and syntactically lightweight variant of open data types and open functions as a solution to the expression problem in the Haskell language. 
Constructors of open data types and equations of open functions may appear scattered throughout a program with several modules. The intended semantics is as follows: the program should behave as if the data types and functions were closed, defined in one place. The order of function equations is determined by best-fit pattern matching, where a specific pattern takes precedence over an unspecific one. We show that our solution is applicable to the expression problem, generic programming, and exceptions. We sketch two implementations: a direct implementation of the semantics, and a scheme based on mutually recursive modules that permits separate compilation}, + booktitle = {Proceedings of the 8th {ACM} {SIGPLAN} {International} {Conference} on {Principles} and {Practice} of {Declarative} {Programming}}, + publisher = {Association for Computing Machinery}, + author = {Löh, Andres and Hinze, Ralf}, + year = {2006}, + note = {event-place: Venice, Italy}, + keywords = {expression problem, extensible data types, extensible exceptions, extensible functions, functional programming, generic programming, Haskell, mutually recursive modules}, + pages = {133--144}, + file = {OpenDatatypes.pdf:/home/mrl/.local/share/zotero/storage/NEP9GZ9N/OpenDatatypes.pdf:application/pdf}, +} + +@inproceedings{hutton_fold_1998, + address = {New York, NY, USA}, + series = {{ICFP} '98}, + title = {Fold and {Unfold} for {Program} {Semantics}}, + isbn = {1-58113-024-4}, + url = {https://doi.org/10.1145/289423.289457}, + doi = {10.1145/289423.289457}, + abstract = {In this paper we explain how recursion operators can be used to structure and reason about program semantics within a functional language. In particular, we show how the recursion operator fold can be used to structure denotational semantics, how the dual recursion operator unfold can be used to structure operational semantics, and how algebraic properties of these operators can be used to reason about program semantics. 
The techniques are explained with the aid of two main examples, the first concerning arithmetic expressions, and the second concerning Milner's concurrent language CCS. The aim of the paper is to give functional programmers new insights into recursion operators, program semantics, and the relationships between them.}, + booktitle = {Proceedings of the {Third} {ACM} {SIGPLAN} {International} {Conference} on {Functional} {Programming}}, + publisher = {Association for Computing Machinery}, + author = {Hutton, Graham}, + year = {1998}, + note = {event-place: Baltimore, Maryland, USA}, + pages = {280--288}, + file = {Hutton - 1998 - Fold and unfold for program semantics.pdf:/home/mrl/.local/share/zotero/storage/YEB9K2TP/Hutton - 1998 - Fold and unfold for program semantics.pdf:application/pdf}, +} + +@article{abadi_dynamic_1991, + title = {Dynamic {Typing} in a {Statically} {Typed} {Language}}, + volume = {13}, + issn = {0164-0925}, + url = {https://doi.org/10.1145/103135.103138}, + doi = {10.1145/103135.103138}, + abstract = {Statically typed programming languages allow earlier error checking, better enforcement of diciplined programming styles, and the generation of more efficient object code than languages where all type consistency checks are performed at run time. However, even in statically typed languages, there is often the need to deal with datawhose type cannot be determined at compile time. To handle such situations safely, we propose to add a type Dynamic whose values are pairs of a value v and a type tag T where v has the type denoted by T. Instances of Dynamic are built with an explicit tagging construct and inspected with a type safe typecase construct.This paper explores the syntax, operational semantics, and denotational semantics of a simple language that includes the type Dynamic. We give examples of how dynamically typed values can be used in programming. Then we discuss an operational semantics for our language and obtain a soundness theorem. 
We present two formulations of the denotational semantics of this language and relate them to the operational semantics. Finally, we consider the implications of polymorphism and some implementation issues.}, + number = {2}, + journal = {ACM Trans. Program. Lang. Syst.}, + author = {Abadi, Martín and Cardelli, Luca and Pierce, Benjamin and Plotkin, Gordon}, + month = apr, + year = {1991}, + note = {Place: New York, NY, USA +Publisher: Association for Computing Machinery}, + keywords = {theory}, + pages = {237--268}, + file = {Abadi et al. - 1991 - Dynamic typing in a statically typed language.pdf:/home/mrl/.local/share/zotero/storage/CJSBG6X7/Abadi et al. - 1991 - Dynamic typing in a statically typed language.pdf:application/pdf}, +} + +@inproceedings{svenningsson_combining_2013, + address = {Berlin, Heidelberg}, + title = {Combining {Deep} and {Shallow} {Embedding} for {EDSL}}, + isbn = {978-3-642-40447-4}, + doi = {10.1007/978-3-642-40447-4_2}, + abstract = {When compiling embedded languages it is natural to use an abstract syntax tree to represent programs. This is known as a deep embedding and it is a rather cumbersome technique compared to other forms of embedding, typically leading to more code and being harder to extend. In shallow embeddings, language constructs are mapped directly to their semantics which yields more flexible and succinct implementations. But shallow embeddings are not well-suited for compiling embedded languages. We present a technique to combine deep and shallow embedding in the context of compiling embedded languages in order to provide the benefits of both techniques. In particular it helps keeping the deep embedding small and it makes extending the embedded language much easier. Our technique also has some unexpected but welcome knock-on effects. It provides fusion of functions to remove intermediate results for free without any additional effort. 
It also helps to give the embedded language a more natural programming interface.}, + booktitle = {Trends in {Functional} {Programming}}, + publisher = {Springer Berlin Heidelberg}, + author = {Svenningsson, Josef and Axelsson, Emil}, + editor = {Loidl, Hans-Wolfgang and Peña, Ricardo}, + year = {2013}, + pages = {21--36}, + file = {svenningsson2013combining.pdf:/home/mrl/.local/share/zotero/storage/NFBGZCZT/svenningsson2013combining.pdf:application/pdf}, +} + +@article{mitchell_abstract_1988, + title = {Abstract {Types} {Have} {Existential} {Type}}, + volume = {10}, + issn = {0164-0925}, + url = {https://doi.org/10.1145/44501.45065}, + doi = {10.1145/44501.45065}, + abstract = {Abstract data type declarations appear in typed programming languages like Ada, Alphard, CLU and ML. This form of declaration binds a list of identifiers to a type with associated operations, a composite “value” we call a data algebra. We use a second-order typed lambda calculus SOL to show how data algebras may be given types, passed as parameters, and returned as results of function calls. In the process, we discuss the semantics of abstract data type declarations and review a connection between typed programming languages and constructive logic.}, + number = {3}, + journal = {ACM Trans. Program. Lang. Syst.}, + author = {Mitchell, John C. 
and Plotkin, Gordon D.}, + month = jul, + year = {1988}, + note = {Place: New York, NY, USA +Publisher: Association for Computing Machinery}, + pages = {470--502}, + file = {Mitchell and Plotkin - 1988 - Abstract types have existential type.pdf:/home/mrl/.local/share/zotero/storage/QXDE5H7C/Mitchell and Plotkin - 1988 - Abstract types have existential type.pdf:application/pdf}, +} + +@inproceedings{yorgey_giving_2012, + address = {New York, NY, USA}, + series = {{TLDI} '12}, + title = {Giving {Haskell} a {Promotion}}, + isbn = {978-1-4503-1120-5}, + url = {https://doi.org/10.1145/2103786.2103795}, + doi = {10.1145/2103786.2103795}, + abstract = {Static type systems strive to be richly expressive while still being simple enough for programmers to use. We describe an experiment that enriches Haskell's kind system with two features promoted from its type system: data types and polymorphism. The new system has a very good power-to-weight ratio: it offers a significant improvement in expressiveness, but, by re-using concepts that programmers are already familiar with, the system is easy to understand and implement.}, + booktitle = {Proceedings of the 8th {ACM} {SIGPLAN} {Workshop} on {Types} in {Language} {Design} and {Implementation}}, + publisher = {Association for Computing Machinery}, + author = {Yorgey, Brent A. and Weirich, Stephanie and Cretin, Julien and Peyton Jones, Simon and Vytiniotis, Dimitrios and Magalhães, José Pedro}, + year = {2012}, + note = {event-place: Philadelphia, Pennsylvania, USA}, + keywords = {haskell, kinds, polymorphism, promotion}, + pages = {53--66}, + file = {Yorgey et al. - 2012 - Giving Haskell a Promotion.pdf:/home/mrl/.local/share/zotero/storage/7GTDGQ3I/Yorgey et al. 
- 2012 - Giving Haskell a Promotion.pdf:application/pdf}, +} + +@inproceedings{atkey_unembedding_2009, + address = {New York, NY, USA}, + series = {Haskell '09}, + title = {Unembedding {Domain}-{Specific} {Languages}}, + isbn = {978-1-60558-508-6}, + url = {https://doi.org/10.1145/1596638.1596644}, + doi = {10.1145/1596638.1596644}, + abstract = {Higher-order abstract syntax provides a convenient way of embedding domain-specific languages, but is awkward to analyse and manipulate directly. We explore the boundaries of higher-order abstract syntax. Our key tool is the unembedding of embedded terms as de Bruijn terms, enabling intensional analysis. As part of our solution we present techniques for separating the definition of an embedded program from its interpretation, giving modular extensions of the embedded language, and different ways to encode the types of the embedded language.}, + booktitle = {Proceedings of the 2nd {ACM} {SIGPLAN} {Symposium} on {Haskell}}, + publisher = {Association for Computing Machinery}, + author = {Atkey, Robert and Lindley, Sam and Yallop, Jeremy}, + year = {2009}, + note = {event-place: Edinburgh, Scotland}, + keywords = {domain-specific languages, higher-order abstract syntax, type classes, unembedding}, + pages = {37--48}, + file = {Atkey et al. - 2009 - Unembedding Domain-Specific Languages.pdf:/home/mrl/.local/share/zotero/storage/GVFRIDUG/Atkey et al. - 2009 - Unembedding Domain-Specific Languages.pdf:application/pdf}, +} + +@inproceedings{krishnamurthi_synthesizing_1998, + address = {Berlin, Heidelberg}, + title = {Synthesizing object-oriented and functional design to promote re-use}, + isbn = {978-3-540-69064-1}, + abstract = {Many problems require recursively specified types of data and a collection of tools that operate on those data. Over time, these problems evolve so that the programmer must extend the toolkit or extend the types and adjust the existing tools accordingly. 
Ideally, this should be done without modifying existing code. Unfortunately, the prevailing program design strategies do not support both forms of extensibility: functional programming accommodates the addition of tools, while object-oriented programming supports either adding new tools or extending the data set, but not both. In this paper, we present a composite design pattern that synthesizes the best of both approaches and in the process resolves the tension between the two design strategies. We also show how this protocol suggests a new set of linguistic facilities for languages that support class systems.}, + booktitle = {{ECOOP}'98 — {Object}-{Oriented} {Programming}}, + publisher = {Springer Berlin Heidelberg}, + author = {Krishnamurthi, Shriram and Felleisen, Matthias and Friedman, Daniel P.}, + editor = {Jul, Eric}, + year = {1998}, + note = {event-place: Brussels, Belgium}, + pages = {91--113}, + file = {Krishnamurthi et al. - 1998 - Synthesizing object-oriented and functional design.pdf:/home/mrl/.local/share/zotero/storage/AMMULPPT/Krishnamurthi et al. - 1998 - Synthesizing object-oriented and functional design.pdf:application/pdf}, +} + +@incollection{gibbons_functional_2015, + address = {Cham}, + title = {Functional {Programming} for {Domain}-{Specific} {Languages}}, + isbn = {978-3-319-15940-9}, + url = {https://doi.org/10.1007/978-3-319-15940-9_1}, + abstract = {Domain-specific languages are a popular application area for functional programming; and conversely, functional programming is a popular implementation vehicle for domain-specific languages—at least, for embedded ones. Why is this? The appeal of embedded domain-specific languages is greatly enhanced by the presence of convenient lightweight tools for defining, implementing, and optimising new languages; such tools represent one of functional programming's strengths. 
In these lectures we discuss functional programming techniques for embedded domain-specific languages; we focus especially on algebraic datatypes and higher-order functions, and their influence on deep and shallow embeddings.}, + booktitle = {Central {European} {Functional} {Programming} {School}: 5th {Summer} {School}, {CEFP} 2013, {Cluj}-{Napoca}, {Romania}, {July} 8-20, 2013, {Revised} {Selected} {Papers}}, + publisher = {Springer International Publishing}, + author = {Gibbons, Jeremy}, + editor = {Zsók, Viktória and Horváth, Zoltán and Csató, Lehel}, + year = {2015}, + doi = {10.1007/978-3-319-15940-9_1}, + pages = {1--28}, + file = {Gibbons - 2015 - Functional Programming for Domain-Specific Languag.pdf:/home/mrl/.local/share/zotero/storage/ARUBLFU6/Gibbons - 2015 - Functional Programming for Domain-Specific Languag.pdf:application/pdf}, +} + +@phdthesis{alimarine_generic_2005, + address = {Nijmegen}, + type = {{PhD}}, + title = {Generic {Functional} {Programming}}, + language = {en}, + school = {Radboud University}, + author = {Alimarine, Artem}, + year = {2005}, + file = {Alimarine - Generic Functional Programming.pdf:/home/mrl/.local/share/zotero/storage/PDTS3SGX/Alimarine - Generic Functional Programming.pdf:application/pdf}, +} + +@inproceedings{barendregt_towards_1987, + title = {Towards an intermediate language for graph rewriting}, + volume = {1}, + booktitle = {{PARLE}, {Parallel} {Architectures} and {Languages} {Europe}}, + publisher = {Springer Verlag}, + author = {Barendregt, HP and van Eekelen, MCJD and Glauert, JRW and Kennaway, JR and Plasmeijer, MJ and Sleep, MR}, + year = {1987}, + pages = {159--174}, + file = {barh87-Lean.ps.gz:/home/mrl/.local/share/zotero/storage/63FBHND7/barh87-Lean.ps.gz:application/gzip}, +} + +@incollection{wang_maintaining_2018, + address = {Cham}, + title = {Maintaining {Separation} of {Concerns} {Through} {Task} {Oriented} {Software} {Development}}, + volume = {10788}, + isbn = {978-3-319-89718-9 978-3-319-89719-6}, 
+ url = {http://link.springer.com/10.1007/978-3-319-89719-6_2}, + abstract = {Task Oriented Programming is a programming paradigm that enhances ‘classic’ functional programming with means to express the coordination of work among people and computer systems, the distribution and control of data sources, and the human-machine interfaces. To make the creation process of such applications feasible, it is important to have separation of concerns. In this paper we demonstrate how this is achieved within the Task Oriented Software Development process and illustrate the approach by means of a case study.}, + language = {en}, + urldate = {2019-01-14}, + booktitle = {Trends in {Functional} {Programming}}, + publisher = {Springer International Publishing}, + author = {Stutterheim, Jurriën and Achten, Peter and Plasmeijer, Rinus}, + editor = {Wang, Meng and Owens, Scott}, + year = {2018}, + doi = {10.1007/978-3-319-89719-6_2}, + pages = {19--38}, + file = {Stutterheim et al. - 2018 - Maintaining Separation of Concerns Through Task Or.pdf:/home/mrl/.local/share/zotero/storage/4GXJEM2U/Stutterheim et al. 
- 2018 - Maintaining Separation of Concerns Through Task Or.pdf:application/pdf}, +} + +@misc{achten_clean_2007, + title = {Clean for {Haskell98} {Programmers}}, + url = {https://www.mbsd.cs.ru.nl/publications/papers/2007/achp2007-CleanHaskellQuickGuide.pdf}, + language = {en}, + author = {Achten, Peter}, + month = jul, + year = {2007}, + file = {Achten - Clean for Haskell98 Programmers.pdf:/home/mrl/.local/share/zotero/storage/69WWSGLF/Achten - Clean for Haskell98 Programmers.pdf:application/pdf}, +} + +@inproceedings{baccelli_reprogramming_2018, + title = {Reprogramming {Low}-end {IoT} {Devices} from the {Cloud}}, + booktitle = {2018 3rd {Cloudification} of the {Internet} of {Things} ({CIoT})}, + publisher = {IEEE}, + author = {Baccelli, Emmanuel and Doerr, Joerg and Jallouli, Ons and Kikuchi, Shinji and Morgenstern, Andreas and Padilla, Francisco Acosta and Schleiser, Kaspar and Thomas, Ian}, + year = {2018}, + pages = {1--6}, + file = {Baccelli et al. - 2018 - Reprogramming Low-end IoT Devices from the Cloud.pdf:/home/mrl/.local/share/zotero/storage/M6LX5ZJN/Baccelli et al. - 2018 - Reprogramming Low-end IoT Devices from the Cloud.pdf:application/pdf}, +} + +@inproceedings{baccelli_scripting_2018, + title = {Scripting {Over}-{The}-{Air}: {Towards} {Containers} on {Low}-end {Devices} in the {Internet} of {Things}}, + booktitle = {{IEEE} {PerCom} 2018}, + author = {Baccelli, Emmanuel and Doerr, Joerg and Kikuchi, Shinji and Padilla, Francisco and Schleiser, Kaspar and Thomas, Ian}, + year = {2018}, + file = {Baccelli et al. - Scripting Over-The-Air Towards Containers on Low-.pdf:/home/mrl/.local/share/zotero/storage/98UTMFAC/Baccelli et al. 
- Scripting Over-The-Air Towards Containers on Low-.pdf:application/pdf}, +} + +@article{swierstra_data_2008, + title = {Data types à la carte}, + volume = {18}, + doi = {10.1017/S0956796808006758}, + number = {4}, + journal = {Journal of Functional Programming}, + author = {Swierstra, Wouter}, + year = {2008}, + pages = {423--436}, + file = {swierstra2008.pdf:/home/mrl/.local/share/zotero/storage/BEQKBXWP/swierstra2008.pdf:application/pdf}, +} + +@article{groningen_exchanging_2010, + title = {Exchanging sources between {Clean} and {Haskell}: {A} double-edged front end for the {Clean} compiler}, + volume = {45}, + shorttitle = {Exchanging sources between {Clean} and {Haskell}}, + number = {11}, + journal = {ACM {SIGPLAN} Notices}, + author = {Groningen, John van and Noort, Thomas van and Achten, Peter and Koopman, Pieter and Plasmeijer, Rinus}, + year = {2010}, + pages = {49--60}, + file = {groj10-Haskell_front_end_Clean.pdf:/home/mrl/.local/share/zotero/storage/WVZWX8WT/groj10-Haskell_front_end_Clean.pdf:application/pdf}, +} + +@inproceedings{cheney_lightweight_2002, + title = {A lightweight implementation of generics and dynamics}, + url = {http://dl.acm.org/citation.cfm?id=581698}, + doi = {10.1145/581690.581698}, + urldate = {2017-05-15}, + booktitle = {Proceedings of the 2002 {ACM} {SIGPLAN} workshop on {Haskell}}, + publisher = {ACM}, + author = {Cheney, James and Hinze, Ralf}, + year = {2002}, + note = {event-place: Pittsburgh Pennsylvania, USA}, + keywords = {dynamic typing, generic programming, type representations}, + pages = {90--104}, + file = {Cheney and Hinze - 2002 - A lightweight implementation of generics and dynam.pdf:/home/mrl/.local/share/zotero/storage/FZ6EGJRJ/Cheney and Hinze - 2002 - A lightweight implementation of generics and dynam.pdf:application/pdf;HW02.pdf:/home/mrl/.local/share/zotero/storage/A8Z49NK6/HW02.pdf:application/pdf}, +} + +@article{tratt_domain_2008, + title = {Domain {Specific} {Language} {Implementation} via 
{Compile}-{Time} {Meta}-{Programming}}, + volume = {30}, + issn = {0164-0925}, + url = {https://doi.org/10.1145/1391956.1391958}, + doi = {10.1145/1391956.1391958}, + abstract = {Domain specific languages (DSLs) are mini-languages that are increasingly seen as being a valuable tool for software developers and non-developers alike. DSLs must currently be created in an ad-hoc fashion, often leading to high development costs and implementations of variable quality. In this article, I show how expressive DSLs can be hygienically embedded in the Converge programming language using its compile-time meta-programming facility, the concept of DSL blocks, and specialised error reporting techniques. By making use of pre-existing facilities, and following a simple methodology, DSL implementation costs can be significantly reduced whilst leading to higher quality DSL implementations.}, + number = {6}, + journal = {ACM Trans. Program. Lang. Syst.}, + author = {Tratt, Laurence}, + month = oct, + year = {2008}, + note = {Place: New York, NY, USA +Publisher: Association for Computing Machinery}, + keywords = {domain specific languages, compile-time meta-programming, Syntax extension}, + file = {Tratt - 2008 - Domain Specific Language Implementation via Compil.pdf:/home/mrl/.local/share/zotero/storage/HHGYJK4H/Tratt - 2008 - Domain Specific Language Implementation via Compil.pdf:application/pdf}, +} + +@book{peyton_jones_implementation_1987, + title = {The {Implementation} of {Functional} {Programming} {Languages}}, + url = {https://www.microsoft.com/en-us/research/publication/the-implementation-of-functional-programming-languages/}, + abstract = {My 1987 book is now out of print, but it is available here in its entirety in PDF form, in one of two formats: single-page portrait double-page landscape Both are fully searchable, thanks to OCR and Norman Ramsey. Errata Section 5.2.4, p87. 
We need an extra rule match us [] E = E This accounts for the possibility that in the constructor rule (Section 5.2.4) there may be some non-nullary constructors for which there are no equations. P168, line 2, "VAR" should be "TVAR".}, + publisher = {Prentice Hall}, + author = {Peyton Jones, Simon}, + month = jan, + year = {1987}, + file = {Peyton Jones - 1987 - The Implementation of Functional Programming Langu.pdf:/home/mrl/.local/share/zotero/storage/9RIR6KGD/Peyton Jones - 1987 - The Implementation of Functional Programming Langu.pdf:application/pdf}, +} + +@article{elliott_compiling_2003, + title = {Compiling embedded languages}, + volume = {13}, + doi = {10.1017/S0956796802004574}, + number = {3}, + journal = {Journal of Functional Programming}, + author = {Elliott, Conal and Finne, Sigbjørn and de Moor, Oege}, + year = {2003}, + note = {Publisher: Cambridge University Press}, + pages = {455--481}, + file = {Elliott et al. - 2003 - Compiling embedded languages.pdf:/home/mrl/.local/share/zotero/storage/3X4Z6AKB/Elliott et al. - 2003 - Compiling embedded languages.pdf:application/pdf}, +} + +@inproceedings{sheard_accomplishments_2001, + address = {Berlin, Heidelberg}, + title = {Accomplishments and {Research} {Challenges} in {Meta}-programming}, + isbn = {978-3-540-44806-8}, + abstract = {In the last ten years the study of meta-programming systems, as formal systems worthy of study in their own right, has vastly accelerated. In that time a lot has been accomplished, yet much remains to be done. 
In this invited talk I wish to review recent accomplishments and future research challenges in hopes that this will spur interest in meta-programming in general and lead to new and better meta-programming systems.}, + booktitle = {Semantics, {Applications}, and {Implementation} of {Program} {Generation}}, + publisher = {Springer Berlin Heidelberg}, + author = {Sheard, Tim}, + editor = {Taha, Walid}, + year = {2001}, + pages = {2--44}, + file = {Sheard - 2001 - Accomplishments and Research Challenges in Meta-pr.pdf:/home/mrl/.local/share/zotero/storage/M7NT6USA/Sheard - 2001 - Accomplishments and Research Challenges in Meta-pr.pdf:application/pdf}, +} + +@incollection{kiselyov_typed_2012, + address = {Berlin, Heidelberg}, + title = {Typed {Tagless} {Final} {Interpreters}}, + isbn = {978-3-642-32202-0}, + url = {https://doi.org/10.1007/978-3-642-32202-0_3}, + abstract = {The so-called `typed tagless final' approach of [6] has collected and polished a number of techniques for representing typed higher-order languages in a typed metalanguage, along with type-preserving interpretation, compilation and partial evaluation. The approach is an alternative to the traditional, or `initial' encoding of an object language as a (generalized) algebraic data type. Both approaches permit multiple interpretations of an expression, to evaluate it, pretty-print, etc. The final encoding represents all and only typed object terms without resorting to generalized algebraic data types, dependent or other fancy types. 
The final encoding lets us add new language forms and interpretations without breaking the existing terms and interpreters.}, + booktitle = {Generic and {Indexed} {Programming}: {International} {Spring} {School}, {SSGIP} 2010, {Oxford}, {UK}, {March} 22-26, 2010, {Revised} {Lectures}}, + publisher = {Springer Berlin Heidelberg}, + author = {Kiselyov, Oleg}, + editor = {Gibbons, Jeremy}, + year = {2012}, + doi = {10.1007/978-3-642-32202-0_3}, + pages = {130--174}, + file = {Kiselyov - 2012 - Typed Tagless Final Interpreters.pdf:/home/mrl/.local/share/zotero/storage/9NBYZLRP/Kiselyov - 2012 - Typed Tagless Final Interpreters.pdf:application/pdf}, +} + +@article{laufer_type_1996, + title = {Type classes with existential types}, + volume = {6}, + doi = {10.1017/S0956796800001817}, + number = {3}, + journal = {Journal of Functional Programming}, + author = {Läufer, Konstantin}, + year = {1996}, + note = {Publisher: Cambridge University Press}, + pages = {485--518}, + file = {Läufer - 1996 - Type classes with existential types.pdf:/home/mrl/.local/share/zotero/storage/FG73PZJE/Läufer - 1996 - Type classes with existential types.pdf:application/pdf}, +} + +@incollection{hinze_fun_2003, + address = {Palgrave}, + series = {Cornerstones of {Computing}}, + title = {Fun {With} {Phantom} {Types}}, + isbn = {978-0-333-99285-2}, + booktitle = {The {Fun} of {Programming}}, + publisher = {Bloomsbury Publishing}, + author = {Hinze, Ralf}, + editor = {Gibbons, Jeremy and de Moor, Oege}, + year = {2003}, + pages = {245--262}, +} + +@inproceedings{boulton_experience_1992, + address = {North-Holland}, + title = {Experience with embedding hardware description languages in {HOL}}, + volume = {10}, + isbn = {0-444-89686-4}, + abstract = {The semantics of hardware description languages can be represented in higher order logic. This provides a formal de nition that is suitable for machine processing. 
Experiments are in progress at Cambridge to see whether this method can be the basis of practical tools based on the HOL theorem-proving assistant. Three languages are being investigated: ELLA, Silage and VHDL. The approaches taken for these languages are compared and current progress on building semantically-based theorem-proving tools is discussed.}, + language = {en}, + booktitle = {{IFIP} {TC10}/{WG}}, + publisher = {Elsevier}, + author = {Boulton, Richard and Gordon, Andrew and Gordon, Mike and Harrison, John and Herbert, John and Tassel, John Van}, + editor = {Stavridou, Victoria and Melham, Thomas F. and Boute, Raymond T.}, + year = {1992}, + note = {event-place: Nijmegen, NL}, + pages = {129--156}, + file = {Boulton et al. - Experience with embedding hardware description lan.pdf:/home/mrl/.local/share/zotero/storage/USAAA6WM/Boulton et al. - Experience with embedding hardware description lan.pdf:application/pdf}, +} + +@inproceedings{gibbons_folding_2014, + address = {New York, NY, USA}, + series = {{ICFP} '14}, + title = {Folding {Domain}-{Specific} {Languages}: {Deep} and {Shallow} {Embeddings} ({Functional} {Pearl})}, + isbn = {978-1-4503-2873-9}, + url = {https://doi.org/10.1145/2628136.2628138}, + doi = {10.1145/2628136.2628138}, + abstract = {A domain-specific language can be implemented by embedding within a general-purpose host language. This embedding may be deep or shallow, depending on whether terms in the language construct syntactic or semantic representations. 
The deep and shallow styles are closely related, and intimately connected to folds; in this paper, we explore that connection.}, + booktitle = {Proceedings of the 19th {ACM} {SIGPLAN} {International} {Conference} on {Functional} {Programming}}, + publisher = {Association for Computing Machinery}, + author = {Gibbons, Jeremy and Wu, Nicolas}, + year = {2014}, + note = {event-place: Gothenburg, Sweden}, + keywords = {deep and shallow embedding, domain-specific languages, folds}, + pages = {339--347}, + file = {Gibbons and Wu - 2014 - Folding Domain-Specific Languages Deep and Shallo.pdf:/home/mrl/.local/share/zotero/storage/6WNWSLFJ/Gibbons and Wu - 2014 - Folding Domain-Specific Languages Deep and Shallo.pdf:application/pdf}, +} + +@inproceedings{oliveira_typecase_2005, + address = {New York, NY, USA}, + series = {Haskell '05}, + title = {{TypeCase}: {A} {Design} {Pattern} for {Type}-{Indexed} {Functions}}, + isbn = {1-59593-071-X}, + url = {https://doi.org/10.1145/1088348.1088358}, + doi = {10.1145/1088348.1088358}, + abstract = {A type-indexed function is a function that is defined for each member of some family of types. Haskell's type class mechanism provides collections of open type-indexed functions, in which the indexing family can be extended by defining a new type class instance but the collection of functions is fixed. The purpose of this paper is to present TypeCase: a design pattern that allows the definition of closed type-indexed functions, in which the index family is fixed but the collection of functions is extensible. It is inspired by Cheney and Hinze's work on lightweight approaches to generic programming. We generalise their techniques as a design pattern. 
Furthermore, we show that type-indexed functions with type-indexed types, and consequently generic functions with generic types, can also be encoded in a lightweight manner, thereby overcoming one of the main limitations of the lightweight approaches.}, + booktitle = {Proceedings of the 2005 {ACM} {SIGPLAN} {Workshop} on {Haskell}}, + publisher = {Association for Computing Machinery}, + author = {Oliveira, Bruno C. d. S. and Gibbons, Jeremy}, + year = {2005}, + note = {event-place: Tallinn, Estonia}, + keywords = {generic programming, type classes, type-indexed functions}, + pages = {98--109}, + file = {Oliveira and Gibbons - 2005 - TypeCase A Design Pattern for Type-Indexed Functi.pdf:/home/mrl/.local/share/zotero/storage/RBKEZKHN/Oliveira and Gibbons - 2005 - TypeCase A Design Pattern for Type-Indexed Functi.pdf:application/pdf}, +} + +@inproceedings{odersky_putting_1996, + address = {New York, NY, USA}, + series = {{POPL} '96}, + title = {Putting {Type} {Annotations} to {Work}}, + isbn = {0-89791-769-3}, + url = {https://doi.org/10.1145/237721.237729}, + doi = {10.1145/237721.237729}, + abstract = {We study an extension of the Hindley/Milner system with explicit type scheme annotations and type declarations. The system can express polymorphic function arguments, user-defined data types with abstract components, and structure types with polymorphic fields. More generally, all programs of the polymorphic lambda calculus can be encoded by a translation between typing derivations. We show that type reconstruction in this system can be reduced to the decidable problem of first-order unification under a mixed prefix.}, + booktitle = {Proceedings of the 23rd {ACM} {SIGPLAN}-{SIGACT} {Symposium} on {Principles} of {Programming} {Languages}}, + publisher = {Association for Computing Machinery}, + author = {Odersky, Martin and Läufer, Konstantin}, + year = {1996}, + note = {event-place: St. 
Petersburg Beach, Florida, USA}, + pages = {54--67}, + file = {Odersky and Läufer - 1996 - Putting Type Annotations to Work.pdf:/home/mrl/.local/share/zotero/storage/WC37TU5H/Odersky and Läufer - 1996 - Putting Type Annotations to Work.pdf:application/pdf}, +} + +@article{carette_finally_2009, + title = {Finally tagless, partially evaluated: {Tagless} staged interpreters for simpler typed languages}, + volume = {19}, + doi = {10.1017/S0956796809007205}, + number = {5}, + journal = {Journal of Functional Programming}, + author = {Carette, Jacques and Kiselyov, Oleg and Shan, Chung-Chieh}, + year = {2009}, + note = {Publisher: Cambridge University Press}, + pages = {509--543}, + file = {CARETTE et al. - 2009 - Finally tagless, partially evaluated Tagless stag.pdf:/home/mrl/.local/share/zotero/storage/T8C8VMHP/CARETTE et al. - 2009 - Finally tagless, partially evaluated Tagless stag.pdf:application/pdf}, +} + +@techreport{plasmeijer_clean_2021, + address = {Nijmegen}, + title = {Clean {Language} {Report} version 3.1}, + urldate = {2021-12-22}, + institution = {Institute for Computing and Information Sciences}, + author = {Plasmeijer, Rinus and van Eekelen, Marko and van Groningen, John}, + month = dec, + year = {2021}, + pages = {127}, + file = {CleanLanguageReport.pdf:/home/mrl/.local/share/zotero/storage/I2SDRIH6/CleanLanguageReport.pdf:application/pdf}, +} + +@inproceedings{nocker_concurrent_1991, + address = {Berlin, Heidelberg}, + title = {Concurrent clean}, + isbn = {978-3-540-47472-2}, + abstract = {Concurrent Clean is an experimental, lazy, higher-order parallel functional programming language based on term graph rewriting. An important difference with other languages is that in Clean graphs are manipulated and not terms. This can be used by the programmer to control communication and sharing of computation. Cyclic structures can be defined. Concurrent Clean furthermore allows to control the (parallel) order of evaluation to make efficient evaluation possible. 
With help of sequential annotations the default lazy evaluation can be locally changed into eager evaluation. The language enables the definition of partially strict data structures which make a whole new class of algorithms feasible in a functional language. A powerful and fast strictness analyser is incorporated in the system. The quality of the code generated by the Clean compiler has been greatly improved such that it is one of the best code generators for a lazy functional language. Two very powerful parallel annotations enable the programmer to define concurrent functional programs with arbitrary process topologies. Concurrent Clean is set up in such a way that the efficiency achieved for the sequential case can largely be maintained for a parallel implementation on loosely coupled parallel machine architectures.}, + booktitle = {{PARLE} '91 {Parallel} {Architectures} and {Languages} {Europe}}, + publisher = {Springer Berlin Heidelberg}, + author = {Nöcker, E. G. J. M. H. and Smetsers, J. E. W. and van Eekelen, M. C. J. D. and Plasmeijer, M. J.}, + editor = {Aarts, Emile H. L. and van Leeuwen, Jan and Rem, Martin}, + year = {1991}, + pages = {202--219}, + file = {Nöcker et al. - 1991 - Concurrent clean.pdf:/home/mrl/.local/share/zotero/storage/XHTNR7BR/Nöcker et al. - 1991 - Concurrent clean.pdf:application/pdf}, +} + +@incollection{mernik_extensible_2013, + address = {Hershey, PA, USA}, + title = {Extensible {Languages}: {Blurring} the {Distinction} between {DSL} and {GPL}}, + isbn = {978-1-4666-2092-6}, + url = {https://services.igi-global.com/resolvedoi/resolve.aspx?doi=10.4018/978-1-4666-2092-6.ch001}, + abstract = {Out of a concern for focus and concision, domain-specific languages (DSLs) are usually very different from general purpose programming languages (GPLs), both at the syntactic and the semantic levels. One approach to DSL implementation is to write a full language infrastructure, including parser, interpreter, or even compiler. 
Another approach however, is to ground the DSL into an extensible GPL, giving you control over its own syntax and semantics. The DSL may then be designed merely as an extension to the original GPL, and its implementation may boil down to expressing only the differences with it. The task of DSL implementation is hence considerably eased. The purpose of this chapter is to provide a tour of the features that make a GPL extensible, and to demonstrate how, in this context, the distinction between DSL and GPL can blur, sometimes to the point of complete disappearance.}, + booktitle = {Formal and {Practical} {Aspects} of {Domain}-{Specific} {Languages}: {Recent} {Developments}}, + publisher = {IGI Global}, + author = {Verna, Didier}, + editor = {Mernik, Marjan}, + year = {2013}, + doi = {10.4018/978-1-4666-2092-6.ch001}, + pages = {1--31}, +} + +@inproceedings{hudak_modular_1998, + title = {Modular domain specific languages and tools}, + doi = {10.1109/ICSR.1998.685738}, + booktitle = {Proceedings. {Fifth} {International} {Conference} on {Software} {Reuse} ({Cat}. {No}.{98TB100203})}, + author = {Hudak, P.}, + year = {1998}, + pages = {134--142}, + file = {Hudak - 1998 - Modular domain specific languages and tools.pdf:/home/mrl/.local/share/zotero/storage/JX7KZ2ST/Hudak - 1998 - Modular domain specific languages and tools.pdf:application/pdf}, +} + +@book{fowler_domain_2010, + edition = {1st}, + title = {Domain {Specific} {Languages}}, + isbn = {0-321-71294-3}, + abstract = {Designed as a wide-ranging guide to Domain Specific Languages (DSLs) and how to approach building them, this book covers a variety of different techniques available for DSLs. The goal is to provide readers with enough information to make an informed choice about whether or not to use a DSL and what kinds of DSL techniques to employ. Part I is a 150-page narrative overview that gives you a broad understanding of general principles. 
The reference material in Parts II through VI provides the details and examples you will need to get started using the various techniques discussed. Both internal and external DSL topics are covered, in addition to alternative computational models and code generation. Although the general principles and patterns presented can be used with whatever programming language you happen to be using, most of the examples are in Java or C\#.}, + publisher = {Addison-Wesley Professional}, + author = {Fowler, Martin}, + year = {2010}, + file = {Fowler - 2010 - Domain-specific languages.pdf:/home/mrl/.local/share/zotero/storage/YYMYXTZ5/Fowler - 2010 - Domain-specific languages.pdf:application/pdf}, +} + +@misc{peter_t_lewis_speech_1985, + address = {Washington, D.C.}, + type = {Speech}, + title = {Speech}, + url = {http://www.chetansharma.com/correcting-the-iot-history/}, + author = {{Peter T. Lewis}}, + month = sep, + year = {1985}, +} + +@article{weiser_computer_1991, + title = {The {Computer} for the 21st {Century}}, + volume = {265}, + language = {en}, + number = {3}, + journal = {Scientific American}, + author = {Weiser, Mark}, + month = sep, + year = {1991}, + pages = {94--105}, + file = {Weiser - 1991 - The Computer for the 21 st Century.pdf:/home/mrl/.local/share/zotero/storage/N5456M2M/Weiser - 1991 - The Computer for the 21 st Century.pdf:application/pdf}, +} + +@misc{evans_internet_2011, + title = {The {Internet} of {Things}: {How} the {Next} {Evolution} of the {Internet} {Is} {Changing} {Everything}}, + url = {https://www.cisco.com/c/dam/en_us/about/ac79/docs/innov/IoT_IBSG_0411FINAL.pdf}, + language = {en}, + publisher = {Cisco Internet Business Solutions Group (IBSG)}, + author = {Evans, Dave}, + month = apr, + year = {2011}, + file = {Evans - 2011 - How the Next Evolution of the Internet Is Changing.pdf:/home/mrl/.local/share/zotero/storage/32YXCM6P/Evans - 2011 - How the Next Evolution of the Internet Is Changing.pdf:application/pdf}, +} + 
+@inproceedings{ireland_classification_2009, + address = {Cancun, Mexico}, + title = {A {Classification} of {Object}-{Relational} {Impedance} {Mismatch}}, + isbn = {978-0-7695-3550-0}, + doi = {10.1109/DBKDA.2009.11}, + booktitle = {First {International} {Conference} on {Advances} in {Databases}, {Knowledge}, and {Data} {Applications}}, + publisher = {IEEE}, + author = {Ireland, Christopher and Bowers, David and Newton, Michael and Waugh, Kevin}, + year = {2009}, + pages = {36--43}, +} + +@techreport{cheney_first-class_2003, + title = {First-class phantom types}, + url = {https://ecommons.cornell.edu/handle/1813/5614}, + number = {TR2003-1901}, + urldate = {2017-05-15}, + institution = {Cornell University}, + author = {Cheney, James and Hinze, Ralf}, + year = {2003}, + file = {Cheney and Hinze - 2003 - First-class phantom types.pdf:/home/mrl/.local/share/zotero/storage/IBKGCFG2/Cheney and Hinze - 2003 - First-class phantom types.pdf:application/pdf}, +} + +@inproceedings{baars_typing_2002, + address = {New York, NY, USA}, + series = {{ICFP} '02}, + title = {Typing {Dynamic} {Typing}}, + isbn = {1-58113-487-8}, + url = {https://doi.org/10.1145/581478.581494}, + doi = {10.1145/581478.581494}, + abstract = {Even when programming in a statically typed language we every now and then encounter statically untypable values; such values result from interpreting values or from communicating with the outside world. To cope with this problem most languages include some form of dynamic types. It may be that the core language has been explicitly extended with such a type, or that one is allowed to live dangerously by using functions like unsafeCoerce. We show how, by a careful use of existentially and universally quantified types, one may achieve the same effect, without extending the language with new or unsafe features. 
The techniques explained are universally applicable, provided the core language is expressive enough; this is the case for the common implementations of Haskell. The techniques are used in the description of a type checking compiler that, starting from an expression term, constructs a typed function representing the semantics of that expression. In this function the overhead associated with the type checking is only once being paid for; in this sense we have thus achieved static type checking.}, + booktitle = {Proceedings of the {Seventh} {ACM} {SIGPLAN} {International} {Conference} on {Functional} {Programming}}, + publisher = {Association for Computing Machinery}, + author = {Baars, Arthur I. and Swierstra, S. Doaitse}, + year = {2002}, + note = {event-place: Pittsburgh, PA, USA}, + keywords = {coercions, dynamic typing, Haskell, Leibnitz' rule, quantified types, static typing, type equality, typed interpreters}, + pages = {157--166}, + file = {Baars and Swierstra - 2002 - Typing dynamic typing.pdf:/home/mrl/.local/share/zotero/storage/QSGVSTM4/Baars and Swierstra - 2002 - Typing dynamic typing.pdf:application/pdf}, +} + +@incollection{hinze_generic_2003, + address = {Berlin, Heidelberg}, + title = {Generic {Haskell}: {Practice} and {Theory}}, + isbn = {978-3-540-45191-4}, + url = {https://doi.org/10.1007/978-3-540-45191-4_1}, + abstract = {Generic Haskell is an extension of Haskell that supports the construction of generic programs. 
These lecture notes describe the basic constructs of Generic Haskell and highlight the underlying theory.}, + booktitle = {Generic {Programming}: {Advanced} {Lectures}}, + publisher = {Springer Berlin Heidelberg}, + author = {Hinze, Ralf and Jeuring, Johan}, + editor = {Backhouse, Roland and Gibbons, Jeremy}, + year = {2003}, + doi = {10.1007/978-3-540-45191-4_1}, + pages = {1--56}, + file = {Hinze and Jeuring - Generic Haskell practice and theory.pdf:/home/mrl/.local/share/zotero/storage/QDRNI5VB/Hinze and Jeuring - Generic Haskell practice and theory.pdf:application/pdf}, +} + +@misc{wadler_expression_1998, + title = {The expression problem}, + url = {https://homepages.inf.ed.ac.uk/wadler/papers/expression/expression.txt}, + language = {en}, + urldate = {2021-02-24}, + author = {Wadler, Philip}, + month = nov, + year = {1998}, + note = {e-mail message, accessed on 2021-02-24}, +} + +@misc{margaret_deuter_rhapsody_2015, + address = {Oxford}, + edition = {Ninth edition}, + title = {Rhapsody}, + journal = {Oxford Advanced Learner's Dictionary of Current English}, + publisher = {Oxford University Press}, + author = {{A S Hornby}}, + editor = {{Margaret Deuter} and {Jennifer Bradbery} and {Joanna Turnbull}}, + year = {2015}, +} + +@misc{wikipedia_contributors_rhapsody_2022, + title = {Rhapsody (music) — {Wikipedia}, {The} {Free} {Encyclopedia}}, + url = {https://en.wikipedia.org/w/index.php?title=Rhapsody_(music)\&oldid=1068385257}, + urldate = {2022-09-06}, + journal = {Wikipedia}, + author = {{Wikipedia contributors}}, + year = {2022}, + note = {accessed on: 2022-09-06}, +} + +@mastersthesis{crooijmans_reducing_2021, + address = {Nijmegen}, + title = {Reducing the {Power} {Consumption} of {IoT} {Devices} in {Task}-{Oriented} {Programming}}, + language = {en}, + school = {Radboud University}, + author = {Crooijmans, Sjoerd}, + month = jul, + year = {2021}, + file = {Crooijmans - 2021 - Reducing the Power Consumption of IoT Devices in 
T.pdf:/home/mrl/.local/share/zotero/storage/98LY9YHH/Crooijmans - 2021 - Reducing the Power Consumption of IoT Devices in T.pdf:application/pdf}, +} + +@mastersthesis{amazonas_cabral_de_andrade_developing_2018, + address = {Nijmegen}, + title = {Developing {Real} {Life}, {Task} {Oriented} {Applications} for the {Internet} of {Things}}, + shorttitle = {Developing {Real} {Life}, {TOP} {Applications} for the {IOT}}, + language = {en}, + school = {Radboud University}, + author = {Amazonas Cabral De Andrade, Matheus}, + year = {2018}, + file = {Lubbers - prof. dr. dr.h.c. ir. M.J. Plasmeijer.pdf:/home/mrl/.local/share/zotero/storage/JXPEWS85/Lubbers - prof. dr. dr.h.c. ir. M.J. Plasmeijer.pdf:application/pdf}, +} diff --git a/preamble.tex b/preamble.tex index 3320aa3..9f36877 100644 --- a/preamble.tex +++ b/preamble.tex @@ -30,13 +30,16 @@ \usepackage[dutch,russian,british]{babel} %\babelfont[russian]{rm}{Liberation Serif} -% Appendices -% TODO is this necessary? -%\usepackage[titletoc]{appendix} +% Appendices (used for subappendices, appendices per chapter) +\usepackage{appendix} + +% Cite bib entry completely +\usepackage{bibentry} +\nobibliography* % Hyperlinks -%\usepackage[pagebackref]{hyperref} -\usepackage[pagebackref]{hyperref} +\usepackage{bookmark,hyperref} +%\usepackage[pagebackref]{bookmark,hyperref} % Setup pdf parameters: TODO \hypersetup{% pdftitle={\mytitle}, @@ -61,6 +64,15 @@ % Nice tables \usepackage{booktabs} +% Multirow cells +\usepackage{multirow} + +% orcid icon +\usepackage{academicons} +\newcommand{\orcid}[1]{\href{https://orcid.org/#1}{\hspace{1mm}\includegraphics[width=1em]{orcid}\hspace{2mm} https://orcid.org/#1}} + +% IPA symbols +\usepackage{tipa} % Automatically wrapping tables \usepackage{tabularx} @@ -70,11 +82,12 @@ % Code \usepackage{stmaryrd} % Short arrow +\usepackage{textcomp} % upquote \usepackage{listings} % General listings settings \lstset{% basewidth=0.45em, - basicstyle=\linespread{0.9}\tt\footnotesize, + basicstyle=\tt\small, 
breakatwhitespace=false, breaklines=true, captionpos=b, @@ -90,40 +103,14 @@ showtabs=false, stringstyle=\it, tabsize=4, -% literate=% -% {a0}{{a\textsubscript{0}}}2 -% {a1}{{a\textsubscript{1}}}2 -% {a2}{{a\textsubscript{2}}}2 -%% {an}{{a\textsubscript{n}}}2 -% {c0}{{c\textsubscript{0}}}2 -% {c1}{{c\textsubscript{1}}}2 -% {c2}{{c\textsubscript{2}}}2 -% {cn}{{c\textsubscript{n}}}2 -% {f0}{{f\textsubscript{0}}}2 -% {f1}{{f\textsubscript{1}}}2 -% {f2}{{f\textsubscript{2}}}2 -% {fn}{{f\textsubscript{n}}}2 -% {t0}{{t\textsubscript{0}}}2 -% {t1}{{t\textsubscript{1}}}2 -% {t2}{{t\textsubscript{2}}}2 -% {tn}{{t\textsubscript{n}}}2 -% {v0}{{v\textsubscript{0}}}2 -% {v1}{{v\textsubscript{1}}}2 -% {v2}{{v\textsubscript{2}}}2 -% {vn}{{v\textsubscript{n}}}2 -% {C0}{{C\textsubscript{0}}}2 -% {C1}{{C\textsubscript{1}}}2 -% {C2}{{C\textsubscript{2}}}2 -% {Cn}{{C\textsubscript{n}}}2 -% {R0}{{R\textsubscript{0}}}2 -% {R1}{{R\textsubscript{1}}}2 -% {R2}{{R\textsubscript{2}}}2 -% {Rn}{{R\textsubscript{n}}}2 + upquote=true, } \usepackage{lstlangclean} \usepackage{lstlanghaskell} +\usepackage{lstlanghaskelllhstex} \newcommand{\cleaninline}[1]{\lstinline[language=Clean,postbreak=]|#1|} \newcommand{\haskellinline}[1]{\lstinline[language=Haskell,style=haskell,postbreak=]|#1|} +\newcommand{\haskelllhstexinline}[1]{\lstinline[language=Haskell,style=haskelllhstex,postbreak=]|#1|} % Fix list of listings title \renewcommand{\lstlistlistingname}{List of Listings} % Fix list of listings chapter separator @@ -146,6 +133,12 @@ \renewcommand*{\lstlistingname}{Listing (Haskell)} } {} +\lstnewenvironment{lstHaskellLhstex}[1][] + {% + \lstset{language=Haskell,style=haskelllhstex,#1}% + \renewcommand*{\lstlistingname}{Listing (Haskell)} + } + {} % Glossaries and acronyms \usepackage[acronym,nonumberlist]{glossaries} @@ -209,7 +202,8 @@ % Custom headers and footers \usepackage{fancyhdr} -\pagestyle{fancy} +%\pagestyle{fancy} +\pagestyle{headings} % Tables spanning pages \usepackage{longtable} diff --git 
a/self.bib b/self.bib new file mode 100644 index 0000000..5a970c3 --- /dev/null +++ b/self.bib @@ -0,0 +1,88 @@ +@inproceedings{lubbers_multitasking_2019, + address = {Opatija, Croatia}, + title = {Multitasking on {Microcontrollers} using {Task} {Oriented} {Programming}}, + copyright = {All rights reserved}, + shorttitle = {Multitasking on {Microcontrollers} using {TOP}}, + doi = {10.23919/MIPRO.2019.8756711}, + booktitle = {2019 42nd {International} {Convention} on {Information} and {Communication} {Technology}, {Electronics} and {Microelectronics} ({MIPRO})}, + author = {Lubbers, M. and Koopman, P. and Plasmeijer, R.}, + month = may, + year = {2019}, + pages = {1587--1592}, + file = {08756711.pdf:/home/mrl/.local/share/zotero/storage/4S44JZPK/08756711.pdf:application/pdf;mipro_2019_proceedings.pdf:/home/mrl/.local/share/zotero/storage/5FFJLMTG/mipro_2019_proceedings.pdf:application/pdf;03_4cows_5338.pdf:/home/mrl/.local/share/zotero/storage/3QJTF5HT/03_4cows_5338.pdf:application/pdf} +} + +@inproceedings{lubbers_task_2018, + address = {Lowell, MA}, + title = {Task {Oriented} {Programming} and the {Internet} of {Things}}, + copyright = {All rights reserved}, + isbn = {978-1-4503-7143-8}, + doi = {10.1145/3310232.3310239}, + abstract = {In the omnipresent Internet of Things (IoT), tiny devices sense and alter the environment, process information and communicate with the world. These devices have limited amounts of processing power and memory. This imposes severe restrictions on their software and communication protocols. As a result, applications are composed of parts written in various programming languages that communicate in many different ways. This impedance mismatch hampers development and maintenance. In previous work we have shown how an IoT device can be programmed by defining an embedded Domain Specific Language (eDSL). In this paper we show how IoT tasks can be seamlessly integrated with a Task Oriented Programming (TOP) server such as iTasks. 
It allows the specification on a high-level of abstraction of arbitrary collaborations between human beings, large systems, and now also IoT devices. The implementation is made in three steps. First, there is an interface to connect devices dynamically to an iTasks server using various communication protocols. Next, we solve the communication problem between IoT devices and the server by porting Shared Data Sources (SDSs) from TOP. As a result, data can be shared, viewed and updated from the server or IoT device. Finally, we crack the maintenance problem by switching from generating fixed code for the IoT devices to dynamically shipping code. It makes it possible to run multiple tasks on an IoT device and to decide at runtime what tasks that should be.}, + language = {en}, + booktitle = {Proceedings of the 30th {Symposium} on the {Implementation} and {Application} of {Functional} {Programming} {Languages}}, + publisher = {ACM}, + author = {Lubbers, Mart and Koopman, Pieter and Plasmeijer, Rinus}, + year = {2018}, + pages = {83--94}, + file = {Lubbers et al. - 2018 - Task Oriented Programming and the Internet of Thin.pdf:/home/mrl/.local/share/zotero/storage/3E5KLI5V/Lubbers et al. 
- 2018 - Task Oriented Programming and the Internet of Thin.pdf:application/pdf} +} + +@mastersthesis{lubbers_task_2017, + address = {Nijmegen}, + title = {Task {Oriented} {Programming} and the {Internet} of {Things}}, + copyright = {All rights reserved}, + shorttitle = {{TOP} and the {IoT}}, + school = {Radboud University}, + author = {Lubbers, Mart}, + year = {2017}, + file = {thesis.pdf:/home/mrl/.local/share/zotero/storage/M49MWHPX/thesis.pdf:application/pdf} +} + +@inproceedings{lubbers_interpreting_2019, + address = {Singapore}, + title = {Interpreting {Task} {Oriented} {Programs} on {Tiny} {Computers}}, + copyright = {All rights reserved}, + language = {en}, + booktitle = {Proceedings of the 31th {Symposium} on the {Implementation} and {Application} of {Functional} {Programming} {Languages}}, + publisher = {ACM}, + author = {Lubbers, Mart and Koopman, Pieter and Plasmeijer, Rinus}, + year = {2019}, + pages = {12}, + file = {pre-conference-proceedings.pdf:/home/mrl/.local/share/zotero/storage/E4R53TGR/pre-conference-proceedings.pdf:application/pdf;Lubbers et al. - 2020 - Interpreting Task Oriented Programs on Tiny Comput.pdf:/home/mrl/.local/share/zotero/storage/8QXYMUIX/Lubbers et al. - 2020 - Interpreting Task Oriented Programs on Tiny Comput.pdf:application/pdf} +} + +@inproceedings{koopman_task-based_2018, + address = {Vienna, Austria}, + title = {A {Task}-{Based} {DSL} for {Microcomputers}}, + copyright = {All rights reserved}, + isbn = {978-1-4503-6355-6}, + url = {http://dl.acm.org/citation.cfm?doid=3183895.3183902}, + doi = {10.1145/3183895.3183902}, + abstract = {The Internet of Things, IoT, makes small connected computing devices almost omnipresent. These devices have typically very limited computing power and severe memory restrictions to make them cheap and power efficient. These devices can interact with the environment via special sensors and actuators. 
Since each device controls several peripherals running interleaved, the control software is quite complicated and hard to maintain. Task Oriented Programming, TOP, offers lightweight communicating threads that can inspect each other’s intermediate results. This makes it well suited for the IoT. This paper presents a functional task-based domain specific language for these IoT devices. We show that it yields concise control programs. By restricting the datatypes and using strict evaluation these programs fit within the restrictions of microcontrollers.}, + language = {en}, + urldate = {2019-01-14}, + booktitle = {Proceedings of the {Real} {World} {Domain} {Specific} {Languages} {Workshop} 2018 on - {RWDSL2018}}, + publisher = {ACM Press}, + author = {Koopman, Pieter and Lubbers, Mart and Plasmeijer, Rinus}, + year = {2018}, + pages = {1--11}, + file = {a4-Koopman.pdf:/home/mrl/.local/share/zotero/storage/TXZD529C/a4-Koopman.pdf:application/pdf;Koopman et al. - 2018 - A Task-Based DSL for Microcomputers.pdf:/home/mrl/.local/share/zotero/storage/9ETMTMX2/Koopman et al. - 2018 - A Task-Based DSL for Microcomputers.pdf:application/pdf} +} + +@inproceedings{lubbers_tiered_2020, + address = {Malmö}, + series = {{IoT} '20}, + title = {Tiered versus {Tierless} {IoT} {Stacks}: {Comparing} {Smart} {Campus} {Software} {Architectures}}, + isbn = {978-1-4503-8758-3}, + url = {https://doi.org/10.1145/3410992.3411002}, + doi = {10.1145/3410992.3411002}, + abstract = {Internet of Things (IoT) software stacks are notoriously complex, conventionally comprising multiple tiers/components and requiring that the developer not only uses multiple programming languages, but also correctly interoperate the components. A novel alternative is to use a single tierless language with a compiler that generates the code for each component, and for their correct interoperation.We report the first ever systematic comparison of tiered and tierless IoT software architectures. 
The comparison is based on two implementations of a non-trivial smart campus application. PRSS has a conventional tiered Python-based architecture, and Clean Wemos Super Sensors (CWSS) has a novel tierless architecture based on Clean and the iTask and mTask embedded DSLs. An operational comparison of CWSS and PRSS demonstrates that they have equivalent functionality, and that both meet the University of Glasgow (UoG) smart campus requirements.Crucially, the tierless CWSS stack requires 70\% less code than the tiered PRSS stack. We analyse the impact of the following three main factors. (1) Tierless developers need to manage less interoperation: CWSS uses two DSLs in a single paradigm where PRSS uses five languages and three paradigms. (2) Tierless developers benefit from automatically generated, and hence correct, communication. (3) Tierless developers can exploit the powerful high-level abstractions such as Task Oriented Programming (TOP) in CWSS. A far smaller and single paradigm codebase improves software quality, dramatically reduces development time, and improves the maintainability of tierless stacks.}, + booktitle = {Proceedings of the 10th {International} {Conference} on the {Internet} of {Things}}, + publisher = {Association for Computing Machinery}, + author = {Lubbers, Mart and Koopman, Pieter and Ramsingh, Adrian and Singer, Jeremy and Trinder, Phil}, + year = {2020}, + note = {event-place: Malmö, Sweden}, + keywords = {domain specific languages, internet of things, network reliability, software architectures}, + file = {Lubbers et al. - 2020 - Tiered versus Tierless IoT Stacks Comparing Smart.pdf:/home/mrl/.local/share/zotero/storage/YY3MJRZ6/Lubbers et al. 
- 2020 - Tiered versus Tierless IoT Stacks Comparing Smart.pdf:application/pdf} +} diff --git a/task_oriented_programming/beyond_microprocessors.tex b/task_oriented_programming/beyond_microprocessors.tex new file mode 100644 index 0000000..467e1a0 --- /dev/null +++ b/task_oriented_programming/beyond_microprocessors.tex @@ -0,0 +1,12 @@ +\documentclass[../thesis.tex]{subfiles} + +\begin{document} +\ifSubfilesClassLoaded{ + \pagenumbering{arabic} +}{} + +\chapter{\texorpdfstring{\acrshort{TOP}}{TOP} for \texorpdfstring{\acrshort{IOT}}{IoT} beyond microprocessors} +Async shares paper (parts of TIOT). + +\input{subfilepostamble} +\end{document} diff --git a/thesis.bib b/thesis.bib deleted file mode 100644 index ffdf65f..0000000 --- a/thesis.bib +++ /dev/null @@ -1,456 +0,0 @@ -@inproceedings{lubbers_multitasking_2019, - address = {Opatija, Croatia}, - title = {Multitasking on {Microcontrollers} using {Task} {Oriented} {Programming}}, - copyright = {All rights reserved}, - shorttitle = {Multitasking on {Microcontrollers} using {TOP}}, - doi = {10.23919/MIPRO.2019.8756711}, - booktitle = {2019 42nd {International} {Convention} on {Information} and {Communication} {Technology}, {Electronics} and {Microelectronics} ({MIPRO})}, - author = {Lubbers, M. and Koopman, P. 
and Plasmeijer, R.}, - month = may, - year = {2019}, - pages = {1587--1592}, - file = {08756711.pdf:/home/mrl/.local/share/zotero/storage/4S44JZPK/08756711.pdf:application/pdf;mipro_2019_proceedings.pdf:/home/mrl/.local/share/zotero/storage/5FFJLMTG/mipro_2019_proceedings.pdf:application/pdf;03_4cows_5338.pdf:/home/mrl/.local/share/zotero/storage/3QJTF5HT/03_4cows_5338.pdf:application/pdf} -} - -@inproceedings{lubbers_task_2018, - address = {Lowell, MA}, - title = {Task {Oriented} {Programming} and the {Internet} of {Things}}, - copyright = {All rights reserved}, - isbn = {978-1-4503-7143-8}, - doi = {10.1145/3310232.3310239}, - abstract = {In the omnipresent Internet of Things (IoT), tiny devices sense and alter the environment, process information and communicate with the world. These devices have limited amounts of processing power and memory. This imposes severe restrictions on their software and communication protocols. As a result, applications are composed of parts written in various programming languages that communicate in many different ways. This impedance mismatch hampers development and maintenance. In previous work we have shown how an IoT device can be programmed by defining an embedded Domain Specific Language (eDSL). In this paper we show how IoT tasks can be seemlessly integrated with a Task Oriented Programming (TOP) server such as iTasks. It allows the specification on a high-level of abstraction of arbitrary collaborations between human beings, large systems, and now also IoT devices. The implementation is made in three steps. First, there is an interface to connect devices dynamically to an iTasks server using various communication protocols. Next, we solve the communication problem between IoT devices and the server by porting Shared Data Sources (SDSs) from TOP. As a result, data can be shared, viewed and updated from the server or IoT device. 
Finally, we crack the maintenance problem by switching from generating fixed code for the IoT devices to dynamically shipping code. It makes it possible to run multiple tasks on an IoT device and to decide at runtime what tasks that should be.}, - language = {en}, - booktitle = {Proceedings of the 30th {Symposium} on the {Implementation} and {Application} of {Functional} {Programming} {Languages}}, - publisher = {ACM}, - author = {Lubbers, Mart and Koopman, Pieter and Plasmeijer, Rinus}, - year = {2018}, - pages = {83--94}, - file = {Lubbers et al. - 2018 - Task Oriented Programming and the Internet of Thin.pdf:/home/mrl/.local/share/zotero/storage/3E5KLI5V/Lubbers et al. - 2018 - Task Oriented Programming and the Internet of Thin.pdf:application/pdf} -} - -@mastersthesis{lubbers_task_2017, - address = {Nijmegen}, - title = {Task {Oriented} {Programming} and the {Internet} of {Things}}, - copyright = {All rights reserved}, - shorttitle = {{TOP} and the {IoT}}, - school = {Radboud University}, - author = {Lubbers, Mart}, - year = {2017}, - file = {thesis.pdf:/home/mrl/.local/share/zotero/storage/M49MWHPX/thesis.pdf:application/pdf} -} - -@incollection{lubbers_writing_2019, - address = {Cham}, - title = {Writing {Internet} of {Things} applications with {Task} {Oriented} {Programming}}, - abstract = {The Internet of Things (IOT) is growing fast. In 2018, there was approximately one connected device per person on earth and the number has been growing ever since. The devices interact with the environment via different modalities at the same time using sensors and actuators making the programs parallel. Yet, writing this type of programs is difficult because the devices have little computation power and memory, the platforms are heterogeneous and the languages are low level. 
Task Oriented Programming (TOP) is a novel declarative programming language paradigm that is used to express coordination of work, collaboration of users and systems, the distribution of shared data and the human computer interaction. The mTask language is a specialized, yet full-fledged, multi-backend TOP language for IOT devices. With the bytecode interpretation backend and the integration with iTasks, tasks can be executed on the device dynamically. This means that —according to the current state of affairs— tasks can be tailor-made at run time, compiled to device-agnostic bytecode and shipped to the device for interpretation. Tasks sent to the device are fully integrated in iTasks to allow every form of interaction with the tasks such as observation of the task value and interaction with Shared Data Sources (SDSs). The application is —server and devices— are programmed in a single language, albeit using two embedded Domain Specific Languages (EDSLs).}, - language = {en}, - booktitle = {Central {European} {Functional} {Programming} {School}: 8th {Summer} {School}, {CEFP} 2019, {Budapest}, {Hungary}, {July} 17–21, 2019, {Revised} {Selected} {Papers}}, - publisher = {Springer International Publishing}, - author = {Lubbers, Mart and Koopman, Pieter and Plasmeijer, Rinus}, - year = {2019}, - pages = {51}, - file = {Lubbers - Writing Internet of Things applications with Task .pdf:/home/mrl/.local/share/zotero/storage/ILZIBYW5/Lubbers - Writing Internet of Things applications with Task .pdf:application/pdf} -} - -@inproceedings{lubbers_interpreting_2019, - address = {Singapore}, - title = {Interpreting {Task} {Oriented} {Programs} on {Tiny} {Computers}}, - copyright = {All rights reserved}, - language = {en}, - booktitle = {Proceedings of the 31th {Symposium} on the {Implementation} and {Application} of {Functional} {Programming} {Languages}}, - publisher = {ACM}, - author = {Lubbers, Mart and Koopman, Pieter and Plasmeijer, Rinus}, - year = {2019}, - pages = {12}, - 
file = {pre-conference-proceedings.pdf:/home/mrl/.local/share/zotero/storage/E4R53TGR/pre-conference-proceedings.pdf:application/pdf;Lubbers et al. - 2020 - Interpreting Task Oriented Programs on Tiny Comput.pdf:/home/mrl/.local/share/zotero/storage/8QXYMUIX/Lubbers et al. - 2020 - Interpreting Task Oriented Programs on Tiny Comput.pdf:application/pdf} -} - -@inproceedings{koopman_task-based_2018, - address = {Vienna, Austria}, - title = {A {Task}-{Based} {DSL} for {Microcomputers}}, - copyright = {All rights reserved}, - isbn = {978-1-4503-6355-6}, - url = {http://dl.acm.org/citation.cfm?doid=3183895.3183902}, - doi = {10.1145/3183895.3183902}, - abstract = {The Internet of Things, IoT, makes small connected computing devices almost omnipresent. These devices have typically very limited computing power and severe memory restrictions to make them cheap and power efficient. These devices can interact with the environment via special sensors and actuators. Since each device controls several peripherals running interleaved, the control software is quite complicated and hard to maintain. Task Oriented Programming, TOP, offers lightweight communicating threads that can inspect each other’s intermediate results. This makes it well suited for the IoT. In this paper presents a functional task-based domain specific language for these IoT devices. We show that it yields concise control programs. By restricting the datatypes and using strict evaluation these programs fit within the restrictions of microcontrollers.}, - language = {en}, - urldate = {2019-01-14}, - booktitle = {Proceedings of the {Real} {World} {Domain} {Specific} {Languages} {Workshop} 2018 on - {RWDSL2018}}, - publisher = {ACM Press}, - author = {Koopman, Pieter and Lubbers, Mart and Plasmeijer, Rinus}, - year = {2018}, - pages = {1--11}, - file = {a4-Koopman.pdf:/home/mrl/.local/share/zotero/storage/TXZD529C/a4-Koopman.pdf:application/pdf;Koopman et al. 
- 2018 - A Task-Based DSL for Microcomputers.pdf:/home/mrl/.local/share/zotero/storage/9ETMTMX2/Koopman et al. - 2018 - A Task-Based DSL for Microcomputers.pdf:application/pdf} -} - -@inproceedings{lubbers_tiered_2020, - address = {Malmö}, - series = {{IoT} '20}, - title = {Tiered versus {Tierless} {IoT} {Stacks}: {Comparing} {Smart} {Campus} {Software} {Architectures}}, - isbn = {978-1-4503-8758-3}, - url = {https://doi.org/10.1145/3410992.3411002}, - doi = {10.1145/3410992.3411002}, - abstract = {Internet of Things (IoT) software stacks are notoriously complex, conventionally comprising multiple tiers/components and requiring that the developer not only uses multiple programming languages, but also correctly interoperate the components. A novel alternative is to use a single tierless language with a compiler that generates the code for each component, and for their correct interoperation.We report the first ever systematic comparison of tiered and tierless IoT software architectures. The comparison is based on two implementations of a non-trivial smart campus application. PRSS has a conventional tiered Python-based architecture, and Clean Wemos Super Sensors (CWSS) has a novel tierless architecture based on Clean and the iTask and mTask embedded DSLs. An operational comparison of CWSS and PRSS demonstrates that they have equivalent functionality, and that both meet the University of Glasgow (UoG) smart campus requirements.Crucially, the tierless CWSS stack requires 70\% less code than the tiered PRSS stack. We analyse the impact of the following three main factors. (1) Tierless developers need to manage less interoperation: CWSS uses two DSLs in a single paradigm where PRSS uses five languages and three paradigms. (2) Tierless developers benefit from automatically generated, and hence correct, communication. (3) Tierless developers can exploit the powerful high-level abstractions such as Task Oriented Programming (TOP) in CWSS. 
A far smaller and single paradigm codebase improves software quality, dramatically reduces development time, and improves the maintainability of tierless stacks.}, - booktitle = {Proceedings of the 10th {International} {Conference} on the {Internet} of {Things}}, - publisher = {Association for Computing Machinery}, - author = {Lubbers, Mart and Koopman, Pieter and Ramsingh, Adrian and Singer, Jeremy and Trinder, Phil}, - year = {2020}, - note = {event-place: Malmö, Sweden}, - keywords = {domain specific languages, internet of things, network reliability, software architectures}, - file = {Lubbers et al. - 2020 - Tiered versus Tierless IoT Stacks Comparing Smart.pdf:/home/mrl/.local/share/zotero/storage/YY3MJRZ6/Lubbers et al. - 2020 - Tiered versus Tierless IoT Stacks Comparing Smart.pdf:application/pdf} -} - -@misc{achten_clean_2007, - title = {Clean for {Haskell98} {Programmers}}, - language = {en}, - author = {Achten, Peter}, - month = jul, - year = {2007}, - file = {Achten - Clean for Haskell98 Programmers.pdf:/home/mrl/.local/share/zotero/storage/69WWSGLF/Achten - Clean for Haskell98 Programmers.pdf:application/pdf}, -} - -@article{groningen_exchanging_2010, - title = {Exchanging sources between {Clean} and {Haskell}: {A} double-edged front end for the {Clean} compiler}, - volume = {45}, - shorttitle = {Exchanging sources between {Clean} and {Haskell}}, - number = {11}, - journal = {ACM Sigplan Notices}, - author = {Groningen, John van and Noort, Thomas van and Achten, Peter and Koopman, Pieter and Plasmeijer, Rinus}, - year = {2010}, - pages = {49--60}, - file = {groj10-Haskell_front_end_Clean.pdf:/home/mrl/.local/share/zotero/storage/WVZWX8WT/groj10-Haskell_front_end_Clean.pdf:application/pdf}, -} - -@techreport{plasmeijer_clean_2021, - address = {Nijmegen}, - title = {Clean {Language} {Report} version 3.1}, - urldate = {2021-12-22}, - institution = {Institute for Computing and Information Sciences}, - author = {Plasmeijer, Rinus and van Eekelen, Marko and 
van Groningen, John}, - month = dec, - year = {2021}, - pages = {127}, - file = {CleanLanguageReport.pdf:/home/mrl/.local/share/zotero/storage/I2SDRIH6/CleanLanguageReport.pdf:application/pdf}, -} - -@inproceedings{brus_clean_1987, - address = {Berlin, Heidelberg}, - title = {Clean — {A} language for functional graph rewriting}, - isbn = {978-3-540-47879-9}, - abstract = {Clean is an experimental language for specifying functional computations in terms of graph rewriting. It is based on an extension of Term Rewriting Systems (TRS) in which the terms are replaced by graphs. Such a Graph Rewriting System (GRS) consists of a, possibly cyclic, directed graph, called the data graph and graph rewrite rules which specify how this data graph may be rewritten. Clean is designed to provide a firm base for functional programming. In particular, Clean is suitable as an intermediate language between functional languages and (parallel) target machine architectures. A sequential implementation of Clean on a conventional machine is described and its performance is compared with other systems. The results show that Clean can be efficiently implemented.}, - booktitle = {Functional {Programming} {Languages} and {Computer} {Architecture}}, - publisher = {Springer Berlin Heidelberg}, - author = {Brus, T. H. and van Eekelen, M. C. J. D. and van Leer, M. O. and Plasmeijer, M. 
J.}, - editor = {Kahn, Gilles}, - year = {1987}, - pages = {364--384}, - file = {brut87-Clean.ps.gz:/home/mrl/.local/share/zotero/storage/T2QATWIE/brut87-Clean.ps.gz:application/gzip}, -} - -@inproceedings{barendregt_towards_1987, - title = {Towards an intermediate language for graph rewriting}, - volume = {1}, - booktitle = {{PARLE}, {Parallel} {Architectures} and {Languages} {Europe}}, - publisher = {Springer Verlag}, - author = {Barendregt, HP and van Eekelen, MCJD and Glauert, JRW and Kennaway, JR and Plasmeijer, MJ and Sleep, MR}, - year = {1987}, - pages = {159--174}, - file = {barh87-Lean.ps.gz:/home/mrl/.local/share/zotero/storage/63FBHND7/barh87-Lean.ps.gz:application/gzip}, -} - -@inproceedings{nocker_concurrent_1991, - address = {Berlin, Heidelberg}, - title = {Concurrent clean}, - isbn = {978-3-540-47472-2}, - abstract = {Concurrent Clean is an experimental, lazy, higher-order parallel functional programming language based on term graph rewriting. An important difference with other languages is that in Clean graphs are manipulated and not terms. This can be used by the programmer to control communication and sharing of computation. Cyclic structures can be defined. Concurrent Clean furthermore allows to control the (parallel) order of evaluation to make efficient evaluation possible. With help of sequential annotations the default lazy evaluation can be locally changed into eager evaluation. The language enables the definition of partially strict data structures which make a whole new class of algorithms feasible in a functional language. A powerful and fast strictness analyser is incorporated in the system. The quality of the code generated by the Clean compiler has been greatly improved such that it is one of the best code generators for a lazy functional language. Two very powerful parallel annotations enable the programmer to define concurrent functional programs with arbitrary process topologies. 
Concurrent Clean is set up in such a way that the efficiency achieved for the sequential case can largely be maintained for a parallel implementation on loosely coupled parallel machine architectures.}, - booktitle = {{PARLE} '91 {Parallel} {Architectures} and {Languages} {Europe}}, - publisher = {Springer Berlin Heidelberg}, - author = {Nöcker, E. G. J. M. H. and Smetsers, J. E. W. and van Eekelen, M. C. J. D. and Plasmeijer, M. J.}, - editor = {Aarts, Emile H. L. and van Leeuwen, Jan and Rem, Martin}, - year = {1991}, - pages = {202--219}, - file = {Nöcker et al. - 1991 - Concurrent clean.pdf:/home/mrl/.local/share/zotero/storage/XHTNR7BR/Nöcker et al. - 1991 - Concurrent clean.pdf:application/pdf}, -} - -@phdthesis{alimarine_generic_2005, - address = {Nijmegen}, - type = {{PhD}}, - title = {Generic {Functional} {Programming}}, - language = {en}, - school = {Radboud University}, - author = {Alimarine, Artem}, - year = {2005}, - file = {Alimarine - Generic Functional Programming.pdf:/home/mrl/.local/share/zotero/storage/PDTS3SGX/Alimarine - Generic Functional Programming.pdf:application/pdf}, -} - -@misc{ghc_team_ghc_2021, - title = {{GHC} {User}’s {Guide} {Documentation}}, - url = {https://downloads.haskell.org/~ghc/latest/docs/users_guide.pdf}, - language = {English}, - urldate = {2021-02-24}, - publisher = {Release}, - author = {GHC Team}, - year = {2021}, -} - -@inproceedings{cheney_lightweight_2002, - title = {A lightweight implementation of generics and dynamics}, - url = {http://dl.acm.org/citation.cfm?id=581698}, - urldate = {2017-05-15}, - booktitle = {Proceedings of the 2002 {ACM} {SIGPLAN} workshop on {Haskell}}, - publisher = {ACM}, - author = {Cheney, James and Hinze, Ralf}, - year = {2002}, - keywords = {dynamic typing, generic programming, type representations}, - pages = {90--104}, - file = {Cheney and Hinze - 2002 - A lightweight implementation of generics and dynam.pdf:/home/mrl/.local/share/zotero/storage/FZ6EGJRJ/Cheney and Hinze - 2002 - A 
lightweight implementation of generics and dynam.pdf:application/pdf;HW02.pdf:/home/mrl/.local/share/zotero/storage/A8Z49NK6/HW02.pdf:application/pdf}, -} - -@incollection{hinze_fun_2003, - address = {Palgrave}, - series = {Cornerstones of {Computing}}, - title = {Fun {With} {Phantom} {Types}}, - isbn = {978-0-333-99285-2}, - booktitle = {The {Fun} of {Programming}}, - publisher = {Bloomsbury Publishing}, - author = {Hinze, Ralf}, - editor = {Gibbons, Jeremy and de Moor, Oege}, - year = {2003}, - pages = {245--262}, -} - -@inproceedings{jeuring_polytypic_1996, - address = {Berlin, Heidelberg}, - title = {Polytypic programming}, - isbn = {978-3-540-70639-7}, - abstract = {Many functions have to be written over and over again for different datatypes, either because datatypes change during the development of programs, or because functions with similar functionality are needed on different datatypes. Examples of such functions are pretty printers, debuggers, equality functions, unifiers, pattern matchers, rewriting functions, etc. Such functions are called polytypic functions. A polytypic function is a function that is defined by induction on the structure of user-defined datatypes. This paper introduces polytypic functions, and shows how to construct and reason about polytypic functions. 
A larger example is studied in detail: polytypic functions for term rewriting and for determining whether a collection of rewrite rules is normalising.}, - booktitle = {Advanced {Functional} {Programming}}, - publisher = {Springer Berlin Heidelberg}, - author = {Jeuring, Johan and Jansson, Patrik}, - editor = {Launchbury, John and Meijer, Erik and Sheard, Tim}, - year = {1996}, - pages = {68--114}, - file = {Jeuring and Jansson - 1996 - Polytypic programming.pdf:/home/mrl/.local/share/zotero/storage/SLC4G2IT/Jeuring and Jansson - 1996 - Polytypic programming.pdf:application/pdf}, -} -@techreport{cheney_first-class_2003, - title = {First-class phantom types}, - url = {https://ecommons.cornell.edu/handle/1813/5614}, - urldate = {2017-05-15}, - institution = {Cornell University}, - author = {Cheney, James and Hinze, Ralf}, - year = {2003}, - file = {https\://ecommons.cornell.edu/bitstream/handle/1813/5614/?sequence=1:/home/mrl/.local/share/zotero/storage/R5IFMHTP/5614.pdf:application/pdf} -} -@incollection{mernik_extensible_2013, - address = {Hershey, PA, USA}, - title = {Extensible {Languages}: {Blurring} the {Distinction} between {DSL} and {GPL}}, - isbn = {978-1-4666-2092-6}, - url = {https://services.igi-global.com/resolvedoi/resolve.aspx?doi=10.4018/978-1-4666-2092-6.ch001}, - abstract = {Out of a concern for focus and concision, domain-specific languages (DSLs) are usually very different from general purpose programming languages (GPLs), both at the syntactic and the semantic levels. One approach to DSL implementation is to write a full language infrastructure, including parser, interpreter, or even compiler. Another approach however, is to ground the DSL into an extensible GPL, giving you control over its own syntax and semantics. The DSL may then be designed merely as an extension to the original GPL, and its implementation may boil down to expressing only the differences with it. The task of DSL implementation is hence considerably eased. 
The purpose of this chapter is to provide a tour of the features that make a GPL extensible, and to demonstrate how, in this context, the distinction between DSL and GPL can blur, sometimes to the point of complete disappearance.}, - booktitle = {Formal and {Practical} {Aspects} of {Domain}-{Specific} {Languages}: {Recent} {Developments}}, - publisher = {IGI Global}, - author = {Verna, Didier}, - editor = {Mernik, Marjan}, - year = {2013}, - doi = {10.4018/978-1-4666-2092-6.ch001}, - pages = {1--31}, -} - -@article{tratt_domain_2008, - title = {Domain {Specific} {Language} {Implementation} via {Compile}-{Time} {Meta}-{Programming}}, - volume = {30}, - issn = {0164-0925}, - url = {https://doi.org/10.1145/1391956.1391958}, - doi = {10.1145/1391956.1391958}, - abstract = {Domain specific languages (DSLs) are mini-languages that are increasingly seen as being a valuable tool for software developers and non-developers alike. DSLs must currently be created in an ad-hoc fashion, often leading to high development costs and implementations of variable quality. In this article, I show how expressive DSLs can be hygienically embedded in the Converge programming language using its compile-time meta-programming facility, the concept of DSL blocks, and specialised error reporting techniques. By making use of pre-existing facilities, and following a simple methodology, DSL implementation costs can be significantly reduced whilst leading to higher quality DSL implementations.}, - number = {6}, - journal = {ACM Trans. Program. Lang. 
Syst.}, - author = {Tratt, Laurence}, - month = oct, - year = {2008}, - note = {Place: New York, NY, USA -Publisher: Association for Computing Machinery}, - keywords = {compile-time meta-programming, domain specific languages, Syntax extension}, - file = {Tratt - 2008 - Domain Specific Language Implementation via Compil.pdf:/home/mrl/.local/share/zotero/storage/HHGYJK4H/Tratt - 2008 - Domain Specific Language Implementation via Compil.pdf:application/pdf}, -} - -@inproceedings{sheard_accomplishments_2001, - address = {Berlin, Heidelberg}, - title = {Accomplishments and {Research} {Challenges} in {Meta}-programming}, - isbn = {978-3-540-44806-8}, - abstract = {In the last ten years the study of meta-programming systems, as formal systems worthy of study in their own right, has vastly accelerated. In that time a lot has been accomplished, yet much remains to be done. In this invited talk I wish to review recent accomplishments and future research challenges in hopes that this will spur interest in meta-programming in general and lead to new and better meta-programming systems.}, - booktitle = {Semantics, {Applications}, and {Implementation} of {Program} {Generation}}, - publisher = {Springer Berlin Heidelberg}, - author = {Sheard, Tim}, - editor = {Taha, Walid}, - year = {2001}, - pages = {2--44}, - file = {Sheard - 2001 - Accomplishments and Research Challenges in Meta-pr.pdf:/home/mrl/.local/share/zotero/storage/M7NT6USA/Sheard - 2001 - Accomplishments and Research Challenges in Meta-pr.pdf:application/pdf}, -} - -@incollection{gibbons_functional_2015, - address = {Cham}, - title = {Functional {Programming} for {Domain}-{Specific} {Languages}}, - isbn = {978-3-319-15940-9}, - url = {https://doi.org/10.1007/978-3-319-15940-9_1}, - abstract = {Domain-specific languages are a popular application area for functional programming; and conversely, functional programming is a popular implementation vehicle for domain-specific languages—at least, for embedded ones. 
Why is this? The appeal of embedded domain-specific languages is greatly enhanced by the presence of convenient lightweight tools for defining, implementing, and optimising new languages; such tools represent one of functional programming's strengths. In these lectures we discuss functional programming techniques for embedded domain-specific languages; we focus especially on algebraic datatypes and higher-order functions, and their influence on deep and shallow embeddings.}, - booktitle = {Central {European} {Functional} {Programming} {School}: 5th {Summer} {School}, {CEFP} 2013, {Cluj}-{Napoca}, {Romania}, {July} 8-20, 2013, {Revised} {Selected} {Papers}}, - publisher = {Springer International Publishing}, - author = {Gibbons, Jeremy}, - editor = {Zsók, Viktória and Horváth, Zoltán and Csató, Lehel}, - year = {2015}, - doi = {10.1007/978-3-319-15940-9_1}, - pages = {1--28}, - file = {Gibbons - 2015 - Functional Programming for Domain-Specific Languag.pdf:/home/mrl/.local/share/zotero/storage/ARUBLFU6/Gibbons - 2015 - Functional Programming for Domain-Specific Languag.pdf:application/pdf}, -} - -@article{elliott_compiling_2003, - title = {Compiling embedded languages}, - volume = {13}, - doi = {10.1017/S0956796802004574}, - number = {3}, - journal = {Journal of Functional Programming}, - author = {Elliott, Conal and Finne, Sigbjørn and de Moor, Oege}, - year = {2003}, - note = {Publisher: Cambridge University Press}, - pages = {455--481}, - file = {Elliott et al. - 2003 - Compiling embedded languages.pdf:/home/mrl/.local/share/zotero/storage/3X4Z6AKB/Elliott et al. - 2003 - Compiling embedded languages.pdf:application/pdf}, -} - -@inproceedings{hudak_modular_1998, - title = {Modular domain specific languages and tools}, - doi = {10.1109/ICSR.1998.685738}, - booktitle = {Proceedings. {Fifth} {International} {Conference} on {Software} {Reuse} ({Cat}. 
{No}.{98TB100203})}, - author = {Hudak, P.}, - year = {1998}, - pages = {134--142}, - file = {Hudak - 1998 - Modular domain specific languages and tools.pdf:/home/mrl/.local/share/zotero/storage/JX7KZ2ST/Hudak - 1998 - Modular domain specific languages and tools.pdf:application/pdf}, -} - -@inproceedings{baccelli_reprogramming_2018, - title = {Reprogramming {Low}-end {IoT} {Devices} from the {Cloud}}, - booktitle = {2018 3rd {Cloudification} of the {Internet} of {Things} ({CIoT})}, - publisher = {IEEE}, - author = {Baccelli, Emmanuel and Doerr, Joerg and Jallouli, Ons and Kikuchi, Shinji and Morgenstern, Andreas and Padilla, Francisco Acosta and Schleiser, Kaspar and Thomas, Ian}, - year = {2018}, - pages = {1--6}, - file = {Baccelli et al. - 2018 - Reprogramming Low-end IoT Devices from the Cloud.pdf:/home/mrl/.local/share/zotero/storage/M6LX5ZJN/Baccelli et al. - 2018 - Reprogramming Low-end IoT Devices from the Cloud.pdf:application/pdf}, -} - -@inproceedings{baccelli_scripting_2018, - title = {Scripting {Over}-{The}-{Air}: {Towards} {Containers} on {Low}-end {Devices} in the {Internet} of {Things}}, - booktitle = {{IEEE} {PerCom} 2018}, - author = {Baccelli, Emmanuel and Doerr, Joerg and Kikuchi, Shinji and Padilla, Francisco and Schleiser, Kaspar and Thomas, Ian}, - year = {2018}, - file = {Baccelli et al. - Scripting Over-The-Air Towards Containers on Low-.pdf:/home/mrl/.local/share/zotero/storage/98UTMFAC/Baccelli et al. - Scripting Over-The-Air Towards Containers on Low-.pdf:application/pdf}, -} - -@inproceedings{plasmeijer_task-oriented_2012, - address = {New York, NY, USA}, - series = {{PPDP} '12}, - title = {Task-{Oriented} {Programming} in a {Pure} {Functional} {Language}}, - isbn = {978-1-4503-1522-7}, - url = {https://doi.org/10.1145/2370776.2370801}, - doi = {10.1145/2370776.2370801}, - abstract = {Task-Oriented Programming (TOP) is a novel programming paradigm for the construction of distributed systems where users work together on the internet. 
When multiple users collaborate, they need to interact with each other frequently. TOP supports the definition of tasks that react to the progress made by others. With TOP, complex multi-user interactions can be programmed in a declarative style just by defining the tasks that have to be accomplished, thus eliminating the need to worry about the implementation detail that commonly frustrates the development of applications for this domain. TOP builds on four core concepts: tasks that represent computations or work to do which have an observable value that may change over time, data sharing enabling tasks to observe each other while the work is in progress, generic type driven generation of user interaction, and special combinators for sequential and parallel task composition. The semantics of these core concepts is defined in this paper. As an example we present the iTask3 framework, which embeds TOP in the functional programming language Clean.}, - booktitle = {Proceedings of the 14th {Symposium} on {Principles} and {Practice} of {Declarative} {Programming}}, - publisher = {Association for Computing Machinery}, - author = {Plasmeijer, Rinus and Lijnse, Bas and Michels, Steffen and Achten, Peter and Koopman, Pieter}, - year = {2012}, - note = {event-place: Leuven, Belgium}, - keywords = {clean, task-oriented programming}, - pages = {195--206}, - file = {103802.pdf:/home/mrl/.local/share/zotero/storage/ZE6A65AW/103802.pdf:application/pdf}, -} - -@incollection{wang_maintaining_2018, - address = {Cham}, - title = {Maintaining {Separation} of {Concerns} {Through} {Task} {Oriented} {Software} {Development}}, - volume = {10788}, - isbn = {978-3-319-89718-9 978-3-319-89719-6}, - url = {http://link.springer.com/10.1007/978-3-319-89719-6_2}, - abstract = {Task Oriented Programming is a programming paradigm that enhances ‘classic’ functional programming with means to express the coordination of work among people and computer systems, the distribution and control of data 
sources, and the human-machine interfaces. To make the creation process of such applications feasible, it is important to have separation of concerns. In this paper we demonstrate how this is achieved within the Task Oriented Software Development process and illustrate the approach by means of a case study.}, - language = {en}, - urldate = {2019-01-14}, - booktitle = {Trends in {Functional} {Programming}}, - publisher = {Springer International Publishing}, - author = {Stutterheim, Jurriën and Achten, Peter and Plasmeijer, Rinus}, - editor = {Wang, Meng and Owens, Scott}, - year = {2018}, - doi = {10.1007/978-3-319-89719-6}, - pages = {19--38}, - file = {Stutterheim et al. - 2018 - Maintaining Separation of Concerns Through Task Or.pdf:/home/mrl/.local/share/zotero/storage/4GXJEM2U/Stutterheim et al. - 2018 - Maintaining Separation of Concerns Through Task Or.pdf:application/pdf}, -} - -@book{fowler_domain_2010, - edition = {1st}, - title = {Domain {Specific} {Languages}}, - isbn = {0-321-71294-3}, - abstract = {Designed as a wide-ranging guide to Domain Specific Languages (DSLs) and how to approach building them, this book covers a variety of different techniques available for DSLs. The goal is to provide readers with enough information to make an informed choice about whether or not to use a DSL and what kinds of DSL techniques to employ. Part I is a 150-page narrative overview that gives you a broad understanding of general principles. The reference material in Parts II through VI provides the details and examples you willneed to get started using the various techniques discussed. Both internal and external DSL topics are covered, in addition to alternative computational models and code generation. 
Although the general principles and patterns presented can be used with whatever programming language you happen to be using, most of the examples are in Java or C\#.}, - publisher = {Addison-Wesley Professional}, - author = {Fowler, Martin}, - year = {2010}, - file = {Fowler - 2010 - Domain-specific languages.pdf:/home/mrl/.local/share/zotero/storage/YYMYXTZ5/Fowler - 2010 - Domain-specific languages.pdf:application/pdf}, -} - -@misc{peter_t_lewis_speech_1985, - address = {Washington, D.C.}, - type = {Speech}, - title = {Speech}, - url = {http://www.chetansharma.com/correcting-the-iot-history/}, - author = {{Peter T. Lewis}}, - month = sep, - year = {1985}, -} - -@article{weiser_computer_1991, - title = {The {Computer} for the 21 st {Century}}, - volume = {265}, - language = {en}, - number = {3}, - journal = {Scientific American}, - author = {Weiser, Mark}, - month = sep, - year = {1991}, - pages = {94--105}, - file = {Weiser - 1991 - The Computer for the 21 st Century.pdf:/home/mrl/.local/share/zotero/storage/N5456M2M/Weiser - 1991 - The Computer for the 21 st Century.pdf:application/pdf}, -} - -@misc{evans_internet_2011, - title = {The {Internet} of {Things}: {How} the {Next} {Evolution} of the {Internet} {Is} {Changing} {Everything}}, - url = {https://www.cisco.com/c/dam/en_us/about/ac79/docs/innov/IoT_IBSG_0411FINAL.pdf}, - language = {en}, - publisher = {Cisco Internet Business Solutions Group (IBSG)}, - author = {Evans, Dave}, - month = apr, - year = {2011}, - file = {Evans - 2011 - How the Next Evolution of the Internet Is Changing.pdf:/home/mrl/.local/share/zotero/storage/32YXCM6P/Evans - 2011 - How the Next Evolution of the Internet Is Changing.pdf:application/pdf}, -} - -@inproceedings{ireland_classification_2009, - address = {Cancun, Mexico}, - title = {A {Classification} of {Object}-{Relational} {Impedance} {Mismatch}}, - isbn = {978-0-7695-3550-0}, - doi = {10.1109/DBKDA.2009.11}, - booktitle = {First {International} {Conference} on {Advances} in 
{Databases}, {Knowledge}, and {Data} {Applications}}, - publisher = {IEEE}, - author = {Ireland, Christopher and Bowers, David and Newton, Michael and Waugh, Kevin}, - year = {2009}, - pages = {36--43}, -} - -@incollection{hinze_generic_2003, - address = {Berlin, Heidelberg}, - title = {Generic {Haskell}: {Practice} and {Theory}}, - isbn = {978-3-540-45191-4}, - url = {https://doi.org/10.1007/978-3-540-45191-4_1}, - abstract = {Generic Haskell is an extension of Haskell that supports the construction of generic programs. These lecture notes describe the basic constructs of Generic Haskell and highlight the underlying theory.}, - booktitle = {Generic {Programming}: {Advanced} {Lectures}}, - publisher = {Springer Berlin Heidelberg}, - author = {Hinze, Ralf and Jeuring, Johan}, - editor = {Backhouse, Roland and Gibbons, Jeremy}, - year = {2003}, - doi = {10.1007/978-3-540-45191-4_1}, - pages = {1--56}, - file = {Hinze and Jeuring - Generic Haskell practice and theory.pdf:/home/mrl/.local/share/zotero/storage/QDRNI5VB/Hinze and Jeuring - Generic Haskell practice and theory.pdf:application/pdf}, -} diff --git a/thesis.tex b/thesis.tex index 223de1e..0537c34 100644 --- a/thesis.tex +++ b/thesis.tex @@ -1,6 +1,5 @@ \documentclass[twoside,10pt]{book} -%\def\mytitle{Task-Oriented Programming for the Internet of Things} \def\mytitle{Task-Oriented Internet of Things Programming} \def\mysubtitle{a purely functional rhapsody} \def\myauthor{Mart Lubbers} @@ -39,6 +38,7 @@ \subfile{frontmatter/motto} % Table of contents +\setcounter{tocdepth}{1} \tableofcontents \newpage% @@ -64,15 +64,17 @@ \label{prt:dsl} % DSL Techniques -\subfile{domain_specific_languages/dsl_techniques} +\subfile{domain-specific_languages/dsl_techniques} % Deep embedding with class -\subfile{domain_specific_languages/class_deep_embedding} +\subfile{domain-specific_languages/class_deep_embedding} % First-class data types -\subfile{domain_specific_languages/first-class_datatypes} 
+\subfile{domain-specific_languages/first-class_datatypes} -\part{Task-Oriented Programming}% +\subfile{domain-specific_languages/strongly-typed_multi-view_stack-based_computations} + +\part{Task-Oriented Programming for the Internet of Things}% \label{prt:top} % mTask by example @@ -84,17 +86,25 @@ % Integration \subfile{task_oriented_programming/integration} +% Beyond microprocessors +\subfile{task_oriented_programming/beyond_microprocessors} + \part{Tiered vs.\ tierless programming}% \label{prt:tvt} \subfile{tiered_vs._tierless_programming/smart_campus} \chapter{Modelling naval scenaries using \acrshort{TOP} and \acrshort{IOT}} -Arjan's werk {NLARMS}? +Waarschijnlijk geen tijd voor + +\bookmarksetup{startatroot}% this is it +\addtocontents{toc}{\bigskip}% perhaps as well +\subfile{conclusion/conclusion} % Start appendix \appendix% \label{chp:appendix} +\addcontentsline{toc}{part}{Appendix} \addthumb{Appendices}{\Alph{chapter}}{white}{gray} % Alpha appendix thumbs \subfile{appendix/clean_for_haskell_programmers.tex} @@ -106,7 +116,7 @@ Arjan's werk {NLARMS}? \phantomsection{}% \label{chp:bibliography} \addcontentsline{toc}{chapter}{Bibliography} -\bibliography{thesis} +\bibliography{other,self} % Summary \subfile{backmatter/summary} @@ -137,9 +147,9 @@ Arjan's werk {NLARMS}? \let\clearpage\relax \let\cleardoublepage\relax \listoffigures% -\listoftables% -\listofalgorithms% -\lstlistoflistings% +%\listoftables% +%\listofalgorithms% +%\lstlistoflistings% \endgroup % Index -- 2.20.1