Commit fbeade39 authored by Giuseppe Castagna

Merge branch 'master' of gitlab.math.univ-paris-diderot.fr:beppe/occurrence-typing

parents 82fcb58e b310bf32
@@ -412,4 +412,102 @@ abstract = "The notion of parallel reduction is extracted from the Tait-Martin-L
year = {1984},
volume = {103},
edition = {Revised},
}
@inproceedings{KF09,
author = {Knowles, Kenneth and Flanagan, Cormac},
title = {Compositional Reasoning and Decidable Checking for Dependent Contract Types},
year = {2009},
isbn = {9781605583303},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
url = {https://doi.org/10.1145/1481848.1481853},
doi = {10.1145/1481848.1481853},
abstract = {Simple type systems perform compositional reasoning in that the type of a term depends
only on the types of its subterms, and not on their semantics. Contracts offer more
expressive abstractions, but static contract checking systems typically violate those
abstractions and base their reasoning directly upon the semantics of terms. Pragmatically,
this noncompositionality makes the decidability of static checking unpredictable. We
first show how compositional reasoning may be restored using standard type-theoretic
techniques, namely existential types and subtyping. Despite its compositional nature,
our type system is exact, in that the type of a term can completely capture its semantics,
hence demonstrating that precision and compositionality are compatible. We then address
predictability of static checking for contract types by giving a type-checking algorithm
for an important class of programs with contract predicates drawn from a decidable
theory. Our algorithm relies crucially on the fact that the type of a term depends
only on the types of its subterms (which fall into the decidable theory) and not on their
semantics (which will not, in general).},
booktitle = {Proceedings of the 3rd Workshop on Programming Languages Meets Program Verification},
pages = {27--38},
numpages = {12},
keywords = {compositional reasoning, abstraction, dependent types, refinement types},
location = {Savannah, GA, USA},
series = {PLPV '09}
}
@inproceedings{OTMW04,
author = {Ou, Xinming and Tan, Gang and Mandelbaum, Yitzhak and Walker, David},
editor = {Levy, Jean-Jacques and Mayr, Ernst W. and Mitchell, John C.},
title = {Dynamic Typing with Dependent Types},
booktitle = {Exploring New Frontiers of Theoretical Informatics},
year = {2004},
publisher = {Springer US},
address = {Boston, MA},
pages = {437--450},
abstract = {Dependent type systems are promising tools programmers can use to increase the reliability and security of their programs. Unfortunately, dependently-typed programming languages require programmers to annotate their programs with many typing specifications to help guide the type checker. This paper shows how to make the process of programming with dependent types more palatable by defining a language in which programmers have fine-grained control over the trade-off between the number of dependent typing annotations they must place on programs and the degree of compile-time safety. More specifically, certain program fragments are marked dependent, in which case the programmer annotates them in detail and a dependent type checker verifies them at compile time. Other fragments are marked simple, in which case they may be annotation-free and dependent constraints are verified at run time.},
isbn = {978-1-4020-8141-5}
}
@inproceedings{RKJ08,
author = {Patrick Maxim Rondon and
Ming Kawaguchi and
Ranjit Jhala},
editor = {Rajiv Gupta and
Saman P. Amarasinghe},
title = {Liquid types},
booktitle = {Proceedings of the {ACM} {SIGPLAN} 2008 Conference on Programming
Language Design and Implementation, Tucson, AZ, USA, June 7-13, 2008},
pages = {159--169},
publisher = {{ACM}},
year = {2008},
url = {https://doi.org/10.1145/1375581.1375602},
doi = {10.1145/1375581.1375602},
timestamp = {Fri, 25 Jun 2021 14:48:54 +0200},
biburl = {https://dblp.org/rec/conf/pldi/RondonKJ08.bib},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
@inproceedings{SF92,
author = {Sabry, Amr and Felleisen, Matthias},
title = {Reasoning about Programs in Continuation-Passing Style.},
year = {1992},
isbn = {0897914813},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
url = {https://doi.org/10.1145/141471.141563},
doi = {10.1145/141471.141563},
abstract = {Plotkin's λ-value calculus is sound but incomplete for reasoning about βη-transformations
on programs in continuation-passing style (CPS). To find a complete extension, we
define a new, compactifying CPS transformation and an “inverse” mapping, un-CPS, both
of which are interesting in their own right. Using the new CPS transformation, we
can determine the precise language of CPS terms closed under βη-transformations.
Using the un-CPS transformation, we can derive a set of axioms such that every equation
between source programs is provable if and only if βη can prove the corresponding
equation between CPS programs. The extended calculus is equivalent to an untyped
variant of Moggi's computational λ-calculus.},
booktitle = {Proceedings of the 1992 ACM Conference on LISP and Functional Programming},
pages = {288--298},
numpages = {11},
location = {San Francisco, California, USA},
series = {LFP '92}
}
@@ -243,6 +243,29 @@ appear in a logical formula (where previous work only allowed formulae
on ``base types''). This is done in the context of a dynamic
language and their approach is extended with polymorphism, dynamic
dispatch and record types.
\rev{A problem faced by refinement type systems is that of propagating to
the branches of a test the information learned about a
\emph{sub-expression} of the tested expression. One solution, chosen for
instance by \cite{OTMW04} and \cite{KF09}, is to devise a meta-function
that recursively explores both a type and an expression and propagates the
type information to the sub-expressions. This process (called
\emph{selfification} in the cited works) roughly corresponds to our
$\constrf$ function (see Section~\ref{sec:typenv}).
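For a variable $x$ of a base refinement type, selfification roughly amounts
to strengthening the refinement with the equality between the refined value
and $x$ itself, that is, schematically (the notation $\mathit{self}$, $\nu$,
$B$, and $p$ is only indicative and does not reproduce the cited systems
exactly):
\[
  \mathit{self}(x,\ \{\nu : B \mid p\}) \;=\; \{\nu : B \mid p \wedge \nu = x\}
\]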
Another approach, followed by \cite{RKJ08}, is based purely on a program
transformation, namely putting the term in \emph{administrative normal
form} (\cite{F92}). With this transformation, every destructor application
(function application, projection, \ldots) is given a name through a
let-binding. The problem of tracking precise type information for every
sub-expression is therefore reduced to that of keeping precise typing
information for a variable. While this solution seems appealing, it is not
as straightforward in our case. Indeed, to retain the same degree of
precision, we would need to identify alpha-equivalent sub-expressions and
make them share the same binding, something that a plain A-normalisation
does not provide.
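To illustrate, with a purely indicative syntax (not the formal grammar of
this paper), a test on a projection such as
$(\pi_1 x \in \mathsf{Int})\,?\,e_1:e_2$ is A-normalised into
\[
  \mathtt{let}\ y = \pi_1 x\ \mathtt{in}\ (y \in \mathsf{Int})\,?\,e_1:e_2
\]
so that the branches only need to refine the type of the variable $y$;
however, another occurrence of $\pi_1 x$ elsewhere in the term would be
bound to a different variable, and the information gained on $y$ would not
be propagated to it.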
}
\rev{TODO Intersection + refinement}
\rev{TODO refinement + gradual ?}
\citet{Kent16} bridge the gap between prior work on occurrence typing
and SMT-based (sub)typing. They introduce the $\lambda_{RTR}$ core calculus, an
@@ -632,7 +632,10 @@ the new contributions.
Victor
\end{answer}
\end{itemize}
\begin{answer}
We have further developed our section of the related work on
refinement types, using the given references as a starting point.
\end{answer}
\item \textbf{Meta-theory Clarifications:}