improved beta-equivalence, added preprocessor

William Ball 2024-11-22 10:36:51 -08:00
parent 7b037db6c0
commit 75ab0a1a41
11 changed files with 268 additions and 233 deletions


@ -19,7 +19,7 @@ Definitions work similarly, having abstract syntax as shown below.
<ident> (<ident> : <type>)* : <type>? := <term> | axiom;
+ (The distinction between `<type>` and `<term>` is for emphasis; they are the exact same syntactic category.) Here are a couple of definitions of the `const` function from above showing the options with the syntax, and a more complex example declaring functional extensionality as an axiom (assuming equality has been previously defined having type `eq : Π (A : *) → A → A → *`). Duplicate definitions are not normally allowed and will result in an error.
const := λ (α : *) ⇒ λ (β : *) ⇒ λ (x : α) => λ (y : β) => x;
const : ∀ (α β : *), α → β → α := fun (α β : *) (x : α) (y : β) ⇒ x;
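A sketch of what the functional-extensionality declaration mentioned above might look like (the name `funext` and the exact statement are illustrative, not taken from the repository; it assumes `eq : Π (A : *) → A → A → *` as stated):

funext (A B : *) (f g : A → B) (H : ∀ (x : A), eq B (f x) (g x)) : eq (A → B) f g := axiom;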
@ -33,34 +33,51 @@ If the RHS of a definition is `axiom`, then `perga` will assume that the identif
Line comments are `--` like in Haskell, and block comments are `[* *]` somewhat like ML (and nest properly). There is no significant whitespace, so you are free to format code as you wish.
+ There isn’t a proper module system (yet), but you can include other files in a dumb, C preprocessor way by using `@include <filepath>` (NOTE: this unfortunately messes up line numbers in error messages). Filepaths are relative to the current file.
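As a small illustration of the include mechanism (the file name and the `not_false` definition here are hypothetical), a file can pull shared definitions into scope and then build on them:

@include logic.pg
-- definitions from the included file, such as `not` and `false`, are now in scope:
not_false : not false := fun (contra : false) => contra;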
# Usage
+ Running `perga` without any arguments drops you into a basic repl. From here, you can type in definitions which `perga` will typecheck. Previous definitions are accessible in future definitions. The usual readline keybindings are available, including navigating history, which is saved between sessions (in `~/.cache/perga/history`). In the repl, you can enter “:q”, press C-c, or press C-d to quit. Entering “:e” shows everything that has been defined along with their types. Entering “:t <ident>” prints the type of a particular identifier, while “:v <ident>” prints its value. Entering “:n <expr>” will fully normalize (including unfolding definitions) an expression, while “:w <expr>” will reduce it to weak head normal form. Finally, “:l <filepath>” loads a file.
- Here’s an example session defining Church numerals and doing some computations with them to show the capabilities of the repl.
+ Here’s an example session showing the capabilities of the repl.
- > nat : * := forall (A : *), (A → A) → A → A;
- > zero : nat := fun (A : *) (f : A -> A) (x : A) ⇒ x;
- > :t zero
- nat
- > suc : nat → nat := fun (n : nat) (A : *) (f : A → A) (x : A) ⇒ f (n A f x);
- > one : nat := suc zero;
- > add (n m : nat) : nat := fun (A : *) (f : A → A) (x : A) ⇒ n A f (m A f x);
+ > :l examples/computation.pg
+ loading: examples/computation.pg
> :e
- add : nat -> nat -> nat
+ eight : nat
+ eq : ∏ (A : *) . A -> A -> *
+ eq_cong : ∏ (A B : *) (x y : A) (f : A -> B) . eq A x y -> eq B (f x) (f y)
+ eq_refl : ∏ (A : *) (x : A) . eq A x x
+ eq_sym : ∏ (A : *) (x y : A) . eq A x y -> eq A y x
+ eq_trans : ∏ (A : *) (x y z : A) . eq A x y -> eq A y z -> eq A x z
+ five : nat
+ four : nat
nat : *
+ nine : nat
one : nat
+ one_plus_one_is_two : eq nat (plus one one) two
+ plus : nat -> nat -> nat
+ seven : nat
+ six : nat
suc : nat -> nat
+ ten : nat
+ three : nat
+ times : nat -> nat -> nat
+ two : nat
+ two_plus_two_is_four : eq nat (plus two two) four
+ two_times_five_is_ten : eq nat (times two five) ten
zero : nat
- > two : nat := suc one;
- > :n add one one
+ > :n plus one one
λ (A : *) (f : A -> A) (x : A) . f (f x)
> :n two
λ (A : *) (f : A -> A) (x : A) . f (f x)
- > :q
+ > :w plus one one
+ λ (A : *) (f : A -> A) (x : A) . one A f (one A f x)
+ > :w two
+ λ (A : *) (f : A -> A) (x : A) . f (one A f x)
- You can also give `perga` a filename as an argument, in which case it will typecheck every definition in the file. Upon finishing, which should be nearly instantaneous, it will print out all the definitions it parsed along with their types (like you had typed “:e” in the repl) so you can verify that it worked.
+ You can also give `perga` a filename as an argument, in which case it will typecheck every definition in the file. If you give `perga` multiple filenames, it will process each one in turn, sharing an environment between them. Upon finishing, which should be nearly instantaneous, it will print out all the files it processed, followed by “success!” if everything typechecked, or the first error it encountered otherwise.
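For instance (file names and output are illustrative, and assume neither file pulls in further includes), a run over two files might look roughly like:

./perga prelude.pg main.pg
loading: prelude.pg
loading: main.pg
success!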
# Simple Example
@ -117,24 +134,29 @@ This means our proofs were accepted. Furthermore, as a sanity check, we can see
I vaguely remember from reading Coq’art that Coq does something special with `let` expressions, so I’ll maybe want to check that out. I tried implementing `let` as syntax sugar for an immediately called function, but that proved to be a massive mess with how I’m handling things. `let` expressions would definitely be handy for factoring out complex subexpressions.
- ### TODO Sections
- Coq-style sections would be very handy, and probably relatively easy to implement. Upon parsing a definition inside a section, I will somehow need to look ahead to see what variables are used to see how I need to modify `binders`, or just make every definition require every section variable as an argument.
### TODO Inference
+ Not decidable, but I might be able to implement some basic unification algorithm, or switch to bidirectional type checking. This isn’t super necessary though; I find leaving off the types of arguments to generally be a bad idea, but in some cases it can be handy, especially when not at the top level.
### TODO Implicits
- Much, much more useful than [inference](#org8f16b1f), implicit arguments would be amazing. It also seems a lot more complicated, but any system for dealing with implicit arguments is far better than none. Getting rid of stuff like [lines 213-215 of the example file](./examples/example.pg) would be amazing.
+ Much, much more useful than [inference](#orgc6eb0b1), implicit arguments would be amazing. It also seems a lot more complicated, but any system for dealing with implicit arguments is far better than none.
### TODO Module System
+ A proper module system would be wonderful. To me, ML style modules with structures, signatures, and functors seem like the right way to handle algebraic structures for a relatively simple language, rather than records (or, worse, a bunch of `and`’s like I currently have; especially painful without [implicits](#orged32318)) or type classes (probably much harder, but could be nicer), but any way of managing scope, importing files, etc. is a necessity.
### TODO Universes?
+ Not super necessary, but occasionally extremely useful. Could be fun, idk.
### TODO Inductive Definitions
@ -150,24 +172,29 @@ This is definitely a stretch goal. It would be cool though, and would turn this
Right now, everything defaults to one line, which can be a problem with how large the proof terms get. Probably want to use [prettyprinter](https://hackage.haskell.org/package/prettyprinter) to be able to nicely handle indentation and line breaks.
- ### TODO Smarter normalization/beta-equivalence checking
- I had what I thought was a smarter way to check for β-equivalence than just fully normalizing both terms and checking if they are the same, but it turned out to be a little wrong, which isn’t too surprising since I just made it up. It’s probably salvageable, but I’d also like to look into other forms of normalization and checking for β-equivalence.
### TODO Better repl
- The repl is decent, but implementing something like [this](https://abhinavsarkar.net/posts/repling-with-haskeline/) would be awesome. I’d also at least like to add a new command `":l <filename>"` to load the definitions from a file.
+ The repl is decent, probably the most fully-featured repl I’ve ever made, but implementing something like [this](https://abhinavsarkar.net/posts/repling-with-haskeline/) would be awesome.
### TODO Improve error messages
+ Error messages are decent, but a little buggy. Syntax error messages are pretty ok, but could have better labeling. The type check error messages are decent, but could do with better location information. Right now, the location defaults to the end of the current definition, which is often good enough, but more detail can’t hurt. The errors are generally very janky and hard to read. Having had quite a bit of practice reading them now, they actually provide very useful information, but could be made **a lot** more readable.
- ### TODO Better testing
- I would like to avoid regressions as I keep working on this, and a suite of unit tests would make me feel much more comfortable. I made unit tests, then added a ton of stuff. Most of the unit tests are kind of pointless now. For now, I think running the code on the example files is pretty sufficient.
+ ### TODO Document library code
+ Low priority, as I’m the only one working on this, I’m working on it very actively, and things will continue rapidly changing, but I’ll want to get around to it once things stabilize, before I forget how everything works.
+ ### TODO Add versions to `perga.cabal` and/or nixify
+ Probably a smart idea.
+ ### TODO More incremental parsing/typechecking
+ Right now, if there’s a failure, everything just stops immediately. More incremental parsing/typechecking could pave the way for more interactivity, e.g. development with holes, an LSP server, etc., not to mention better error messages.
### TODO Alternate syntax
@ -199,9 +226,20 @@ and is more intuitively understandable to a mathematician not familiar with type
I’m imagining the parser could be chosen based on the file extension or something. Some way to mix the syntaxes could be nice too.
- ### TODO treesitter parser and/or emacs mode
- Really not necessary, especially while the syntax is in a bit of flux, but would eventually be nice. The syntax is simple enough that a treesitter grammar shouldn’t be too hard to write. An emacs mode would especially be nice if I ever end up implementing an [alternate syntax](#orgf39c63f), to better handle indentation, automatically adjust line numbers, etc.
+ ### TODO Infix/mixfix operators
+ Infix/mixfix operators would be very nice and make `perga` look more normal. It’s funny, at the moment it looks a lot like a lisp, even though it’s totally not.
+ (eq_trans nat (plus n (suc m)) (suc (plus n m)) (plus (suc m) n)
+   (plus_s_r n m)
+   (eq_trans nat (suc (plus n m)) (suc (plus m n)) (plus (suc m) n)
+     (eq_cong nat nat (plus n m) (plus m n) suc IH)
+     (eq_sym nat (plus (suc m) n) (suc (plus m n)) (plus_s_l m n))))
+ ### DONE treesitter parser and/or emacs mode
+ There’s a [tree-sitter parser](https://forgejo.ballcloud.cc/wball/tree-sitter-perga) and [neovim plugin](https://forgejo.ballcloud.cc/wball/perga.nvim) available now, but no emacs mode.
### TODO TUI


@ -20,7 +20,7 @@ Definitions work similarly, having abstract syntax as shown below.
#+begin_src
<ident> (<ident> : <type>)* : <type>? := <term> | axiom;
#+end_src
+ (The distinction between =<type>= and =<term>= is for emphasis; they are the exact same syntactic category.) Here are a couple of definitions of the =const= function from above showing the options with the syntax, and a more complex example declaring functional extensionality as an axiom (assuming equality has been previously defined having type =eq : Π (A : *) → A → A → *=). Duplicate definitions are not normally allowed and will result in an error.
#+begin_src
const := λ (α : *) ⇒ λ (β : *) ⇒ λ (x : α) => λ (y : β) => x;
const : ∀ (α β : *), α → β → α := fun (α β : *) (x : α) (y : β) ⇒ x;
@ -34,33 +34,50 @@ If the RHS of a definition is =axiom=, then =perga= will assume that the identif
Line comments are =--= like in Haskell, and block comments are =[* *]= somewhat like ML (and nest properly). There is no significant whitespace, so you are free to format code as you wish.

+ There isn't a proper module system (yet), but you can include other files in a dumb, C preprocessor way by using =@include <filepath>= (NOTE: this unfortunately messes up line numbers in error messages). Filepaths are relative to the current file.

* Usage
+ Running =perga= without any arguments drops you into a basic repl. From here, you can type in definitions which =perga= will typecheck. Previous definitions are accessible in future definitions. The usual readline keybindings are available, including navigating history, which is saved between sessions (in =~/.cache/perga/history=). In the repl, you can enter ":q", press C-c, or press C-d to quit. Entering ":e" shows everything that has been defined along with their types. Entering ":t <ident>" prints the type of a particular identifier, while ":v <ident>" prints its value. Entering ":n <expr>" will fully normalize (including unfolding definitions) an expression, while ":w <expr>" will reduce it to weak head normal form. Finally, ":l <filepath>" loads a file.

- Here's an example session defining Church numerals and doing some computations with them to show the capabilities of the repl.
+ Here's an example session showing the capabilities of the repl.
- #+begin_src
+ #+begin_src text
- > nat : * := forall (A : *), (A → A) → A → A;
- > zero : nat := fun (A : *) (f : A -> A) (x : A) ⇒ x;
- > :t zero
- nat
- > suc : nat → nat := fun (n : nat) (A : *) (f : A → A) (x : A) ⇒ f (n A f x);
- > one : nat := suc zero;
- > add (n m : nat) : nat := fun (A : *) (f : A → A) (x : A) ⇒ n A f (m A f x);
+ > :l examples/computation.pg
+ loading: examples/computation.pg
> :e
- add : nat -> nat -> nat
+ eight : nat
+ eq : ∏ (A : *) . A -> A -> *
+ eq_cong : ∏ (A B : *) (x y : A) (f : A -> B) . eq A x y -> eq B (f x) (f y)
+ eq_refl : ∏ (A : *) (x : A) . eq A x x
+ eq_sym : ∏ (A : *) (x y : A) . eq A x y -> eq A y x
+ eq_trans : ∏ (A : *) (x y z : A) . eq A x y -> eq A y z -> eq A x z
+ five : nat
+ four : nat
nat : *
+ nine : nat
one : nat
+ one_plus_one_is_two : eq nat (plus one one) two
+ plus : nat -> nat -> nat
+ seven : nat
+ six : nat
suc : nat -> nat
+ ten : nat
+ three : nat
+ times : nat -> nat -> nat
+ two : nat
+ two_plus_two_is_four : eq nat (plus two two) four
+ two_times_five_is_ten : eq nat (times two five) ten
zero : nat
- > two : nat := suc one;
- > :n add one one
+ > :n plus one one
λ (A : *) (f : A -> A) (x : A) . f (f x)
> :n two
λ (A : *) (f : A -> A) (x : A) . f (f x)
- > :q
+ > :w plus one one
+ λ (A : *) (f : A -> A) (x : A) . one A f (one A f x)
+ > :w two
+ λ (A : *) (f : A -> A) (x : A) . f (one A f x)
#+end_src
- You can also give =perga= a filename as an argument, in which case it will typecheck every definition in the file. Upon finishing, which should be nearly instantaneous, it will print out all the definitions it parsed along with their types (like you had typed ":e" in the repl) so you can verify that it worked.
+ You can also give =perga= a filename as an argument, in which case it will typecheck every definition in the file. If you give =perga= multiple filenames, it will process each one in turn, sharing an environment between them. Upon finishing, which should be nearly instantaneous, it will print out all the files it processed, followed by "success!" if everything typechecked, or the first error it encountered otherwise.
* Simple Example
There are many very well commented examples in the [[./examples/]] folder. These include
@ -108,17 +125,20 @@ This means our proofs were accepted. Furthermore, as a sanity check, we can see
*** TODO Let-expressions
I vaguely remember from reading Coq'art that Coq does something special with =let= expressions, so I'll maybe want to check that out. I tried implementing =let= as syntax sugar for an immediately called function, but that proved to be a massive mess with how I'm handling things. =let= expressions would definitely be handy for factoring out complex subexpressions.
- *** TODO Sections
- Coq-style sections would be very handy, and probably relatively easy to implement. Upon parsing a definition inside a section, I will somehow need to look ahead to see what variables are used to see how I need to modify =binders=, or just make every definition require every section variable as an argument.
*** TODO Inference
+ Not decidable, but I might be able to implement some basic unification algorithm, or switch to bidirectional type checking. This isn't super necessary though; I find leaving off the types of arguments to generally be a bad idea, but in some cases it can be handy, especially when not at the top level.
*** TODO Implicits
- Much, much more useful than [[Inference][inference]], implicit arguments would be amazing. It also seems a lot more complicated, but any system for dealing with implicit arguments is far better than none. Getting rid of stuff like [[./examples/example.pg::213][lines 213-215 of the example file]] would be amazing.
+ Much, much more useful than [[Inference][inference]], implicit arguments would be amazing. It also seems a lot more complicated, but any system for dealing with implicit arguments is far better than none.
*** TODO Module System
+ A proper module system would be wonderful. To me, ML style modules with structures, signatures, and functors seem like the right way to handle algebraic structures for a relatively simple language, rather than records (or, worse, a bunch of =and='s like I currently have; especially painful without [[Implicits][implicits]]) or type classes (probably much harder, but could be nicer), but any way of managing scope, importing files, etc. is a necessity.
*** TODO Universes?
+ Not super necessary, but occasionally extremely useful. Could be fun, idk.
*** TODO Inductive Definitions
This is definitely a stretch goal. It would be cool though, and would turn this proof checker into a much more competent programming language. It's not necessary for the math, but inductive definitions let you leverage computation in proofs, which is amazing. They also make certain definitions way easier, by avoiding needing to manually stipulate elimination rules, including induction principles, and let you keep more math constructive and understandable to the computer.
@ -128,17 +148,20 @@ This is definitely a stretch goal. It would be cool though, and would turn this
*** TODO Prettier pretty printing
Right now, everything defaults to one line, which can be a problem with how large the proof terms get. Probably want to use [[https://hackage.haskell.org/package/prettyprinter][prettyprinter]] to be able to nicely handle indentation and line breaks.
- *** TODO Smarter normalization/beta-equivalence checking
- I had what I thought was a smarter way to check for β-equivalence than just fully normalizing both terms and checking if they are the same, but it turned out to be a little wrong, which isn't too surprising since I just made it up. It's probably salvageable, but I'd also like to look into other forms of normalization and checking for β-equivalence.
*** TODO Better repl
- The repl is decent, but implementing something like [[https://abhinavsarkar.net/posts/repling-with-haskeline/][this]] would be awesome. I'd also at least like to add a new command =":l <filename>"= to load the definitions from a file.
+ The repl is decent, probably the most fully-featured repl I've ever made, but implementing something like [[https://abhinavsarkar.net/posts/repling-with-haskeline/][this]] would be awesome.
*** TODO Improve error messages
+ Error messages are decent, but a little buggy. Syntax error messages are pretty ok, but could have better labeling. The type check error messages are decent, but could do with better location information. Right now, the location defaults to the end of the current definition, which is often good enough, but more detail can't hurt. The errors are generally very janky and hard to read. Having had quite a bit of practice reading them now, they actually provide very useful information, but could be made *a lot* more readable.
- *** TODO Better testing
- I would like to avoid regressions as I keep working on this, and a suite of unit tests would make me feel much more comfortable. I made unit tests, then added a ton of stuff. Most of the unit tests are kind of pointless now. For now, I think running the code on the example files is pretty sufficient.
+ *** TODO Document library code
+ Low priority, as I'm the only one working on this, I'm working on it very actively, and things will continue rapidly changing, but I'll want to get around to it once things stabilize, before I forget how everything works.
+ *** TODO Add versions to =perga.cabal= and/or nixify
+ Probably a smart idea.
+ *** TODO More incremental parsing/typechecking
+ Right now, if there's a failure, everything just stops immediately. More incremental parsing/typechecking could pave the way for more interactivity, e.g. development with holes, an LSP server, etc., not to mention better error messages.
*** TODO Alternate syntax
I've had a bunch of ideas for a more mathematician-friendly syntax bouncing around my head for a while. Implementing one of them would be awesome, but probably quite tricky.
@ -167,8 +190,18 @@ and is more intuitively understandable to a mathematician not familiar with type
I'm imagining the parser could be chosen based on the file extension or something. Some way to mix the syntaxes could be nice too.
- *** TODO treesitter parser and/or emacs mode
- Really not necessary, especially while the syntax is in a bit of flux, but would eventually be nice. The syntax is simple enough that a treesitter grammar shouldn't be too hard to write. An emacs mode would especially be nice if I ever end up implementing an [[Alternate syntax][alternate syntax]], to better handle indentation, automatically adjust line numbers, etc.
+ *** TODO Infix/mixfix operators
+ Infix/mixfix operators would be very nice and make =perga= look more normal. It's funny, at the moment it looks a lot like a lisp, even though it's totally not.
+ #+begin_src
+ (eq_trans nat (plus n (suc m)) (suc (plus n m)) (plus (suc m) n)
+   (plus_s_r n m)
+   (eq_trans nat (suc (plus n m)) (suc (plus m n)) (plus (suc m) n)
+     (eq_cong nat nat (plus n m) (plus m n) suc IH)
+     (eq_sym nat (plus (suc m) n) (suc (plus m n)) (plus_s_l m n))))
+ #+end_src
+ *** DONE treesitter parser and/or emacs mode
+ There's a [[https://forgejo.ballcloud.cc/wball/tree-sitter-perga][tree-sitter parser]] and [[https://forgejo.ballcloud.cc/wball/perga.nvim][neovim plugin]] available now, but no emacs mode.
*** TODO TUI
This is definitely a stretch goal, and I'm not sure how good of an idea it would be, but I'm imagining a TUI split into two panels. On the left you can see the term you are building with holes in it. On the right you have the focused hole's type as well as the types of everything in scope (like Coq and Lean show while you're in the middle of a proof). Then you can interact with the system by entering commands (e.g. =intros=, =apply=, etc.) which changes the proof term on the left. You'd also just be able to type in the left window as well, and edit the proof term directly. This way you'd get the benefits of working with tactics, making it way faster to construct proof terms, and the benefits of working with proof terms directly, namely transparency and simplicity. I'll probably want to look into [[https://hackage.haskell.org/package/brick][brick]] if I want to make this happen.


@ -1,30 +1,19 @@
module Main where

import Control.Monad (void)
- import qualified Data.Map as M
- import qualified Data.Text.IO as T
- import Eval (Env)
- import Parser
+ import Control.Monad.Except
+ import Eval (Env, emptyEnv)
+ import Parser (handleFile)
import Repl
import System.Environment
- import System.IO

main :: IO ()
main = do
  args <- getArgs
  case args of
    [] -> void repl
-     [file] -> handleFile file
-     _ -> putStrLn "usage './perga' for repl and './perga <filename>' to get input from a file"
+     files -> handleFiles emptyEnv files

- dumpEnv :: Env -> IO ()
- dumpEnv = void . M.traverseWithKey ((putStrLn .) . showEnvEntry)
-
- handleFile :: String -> IO ()
- handleFile fileName =
-   do
-     fileH <- openFile fileName ReadMode
-     input <- T.hGetContents fileH
-     case parseProgram input of
-       Left err -> putStrLn err
-       Right env -> dumpEnv env
+ handleFiles :: Env -> [String] -> IO ()
+ handleFiles _ [] = putStrLn "success!"
+ handleFiles env (file : rest) = runExceptT (handleFile env file) >>= either putStrLn (`handleFiles` rest)


@ -1,10 +1,11 @@
module Repl (repl, showEnvEntry) where

+ import Control.Monad.Except
import Control.Monad.Reader
- import Data.Functor (void)
import Data.List (isPrefixOf, stripPrefix)
import qualified Data.Map as M
import qualified Data.Text as T
+ import Errors (Result)
import Eval
import Expr
import Parser
@ -12,7 +13,16 @@ import System.Console.Haskeline
import System.Directory (createDirectoryIfMissing, getHomeDirectory)
import System.FilePath ((</>))

- data ReplCommand = Quit | DumpEnv | TypeQuery String | Normalize String | Input String deriving (Show)
+ data ReplCommand
+   = Quit
+   | DumpEnv
+   | TypeQuery String
+   | ValueQuery String
+   | Normalize String
+   | WeakNormalize String
+   | Input String
+   | LoadFile String
+   deriving (Show)

parseCommand :: Maybe String -> Maybe ReplCommand
parseCommand Nothing = Nothing
@ -20,15 +30,12 @@ parseCommand (Just ":q") = Just Quit
parseCommand (Just ":e") = Just DumpEnv parseCommand (Just ":e") = Just DumpEnv
parseCommand (Just input) parseCommand (Just input)
| ":t " `isPrefixOf` input = TypeQuery <$> stripPrefix ":t " input | ":t " `isPrefixOf` input = TypeQuery <$> stripPrefix ":t " input
| ":v " `isPrefixOf` input = ValueQuery <$> stripPrefix ":v " input
| ":n " `isPrefixOf` input = Normalize <$> stripPrefix ":n " input | ":n " `isPrefixOf` input = Normalize <$> stripPrefix ":n " input
| ":w " `isPrefixOf` input = WeakNormalize <$> stripPrefix ":w " input
| ":l " `isPrefixOf` input = LoadFile <$> stripPrefix ":l " input
| otherwise = Just $ Input input | otherwise = Just $ Input input
showEnvEntry :: T.Text -> EnvLine -> String
showEnvEntry k EL{_ty = t} = T.unpack k ++ " : " ++ prettyS t
dumpEnv :: Env -> InputT IO ()
dumpEnv = void . M.traverseWithKey ((outputStrLn .) . showEnvEntry)
handleInput :: Env -> String -> InputT IO Env handleInput :: Env -> String -> InputT IO Env
handleInput env input = handleInput env input =
let (res, env') = parseDefEmpty env (T.pack input) let (res, env') = parseDefEmpty env (T.pack input)
@ -36,6 +43,20 @@ handleInput env input =
      Left err -> outputStrLn err >> pure env'
      Right () -> pure env'

+ actOnParse :: Env -> String -> (Expr -> InputT IO ()) -> InputT IO ()
+ actOnParse env input action = case parseExpr env (T.pack input) of
+   Left err -> outputStrLn err
+   Right expr -> action expr
+
+ printErrorOrResult :: Env -> (Expr -> ReaderT Env Result Expr) -> Expr -> InputT IO ()
+ printErrorOrResult env action expr = outputStrLn $ either show prettyS $ runReaderT (action expr) env
+
+ parseActPrint :: Env -> String -> (Expr -> ReaderT Env Result Expr) -> InputT IO ()
+ parseActPrint env input action = actOnParse env input (printErrorOrResult env action)
+
+ lookupAct :: Env -> String -> (EnvLine -> InputT IO ()) -> InputT IO ()
+ lookupAct env input action = maybe (outputStrLn $ "'" ++ input ++ "' unbound") action $ M.lookup (T.pack input) env

repl :: IO Env
repl = do
  home <- getHomeDirectory
@ -50,21 +71,10 @@ repl = do
        case parseCommand minput of
          Nothing -> pure env
          Just Quit -> pure env
-           Just DumpEnv -> dumpEnv env >> loop env
-           Just (TypeQuery input) ->
-             ( case M.lookup (T.pack input) env of
-                 Nothing -> outputStrLn (input ++ " unbound")
-                 Just (EL{_ty = t}) -> outputStrLn $ prettyS t
-             )
-               >> loop env
-           Just (Normalize input) ->
-             ( case parseExpr env (T.pack input) of
-                 Left err -> outputStrLn err
-                 Right expr -> case runReaderT (normalize expr) env of
-                   Left err -> outputStrLn $ show err
-                   Right result -> outputStrLn $ prettyS result
-             )
-               >> loop env
-           Just (Input input) -> do
-             env' <- handleInput env input
-             loop env'
+           Just DumpEnv -> lift (dumpEnv env) >> loop env
+           Just (TypeQuery input) -> lookupAct env input (outputStrLn . prettyS . _ty) >> loop env
+           Just (ValueQuery input) -> lookupAct env input (outputStrLn . prettyS . _val) >> loop env
+           Just (Normalize input) -> parseActPrint env input normalize >> loop env
+           Just (WeakNormalize input) -> parseActPrint env input whnf >> loop env
+           Just (LoadFile filename) -> lift (runExceptT $ handleFile env filename) >>= either ((>> loop env) . outputStrLn) loop
+           Just (Input input) -> handleInput env input >>= loop


@ -2,40 +2,7 @@
-- | BASIC LOGIC |
-- --------------------------------------------------------------------------------------------------------------

- -- first some basic logic, see <examples/logic.pg> for more details on these definitions
- false : * := forall (A : *), A;
- false_elim (A : *) (contra : false) : A := contra A;
- not (A : *) : * := A -> false;
- not_intro (A : *) (h : A -> false) : not A := h;
- not_elim (A B : *) (a : A) (na : not A) : B := na a B;
- and (A B : *) : * := forall (C : *), (A -> B -> C) -> C;
- and_intro (A B : *) (a : A) (b : B) : and A B :=
-   fun (C : *) (H : A -> B -> C) => H a b;
- and_elim_l (A B : *) (ab : and A B) : A :=
-   ab A (fun (a : A) (b : B) => a);
- and_elim_r (A B : *) (ab : and A B) : B :=
-   ab B (fun (a : A) (b : B) => b);
- or (A B : *) : * := forall (C : *), (A -> C) -> (B -> C) -> C;
- or_intro_l (A B : *) (a : A) : or A B :=
-   fun (C : *) (ha : A -> C) (hb : B -> C) => ha a;
- or_intro_r (A B : *) (b : B) : or A B :=
-   fun (C : *) (ha : A -> C) (hb : B -> C) => hb b;
- or_elim (A B C : *) (ab : or A B) (ha : A -> C) (hb : B -> C) : C :=
-   ab C ha hb;
- eq (A : *) (x y : A) := forall (P : A -> *), P x -> P y;
- eq_refl (A : *) (x : A) : eq A x x := fun (P : A -> *) (Hx : P x) => Hx;
- eq_sym (A : *) (x y : A) (Hxy : eq A x y) : eq A y x := fun (P : A -> *) (Hy : P y) =>
-   Hxy (fun (z : A) => P z -> P x) (fun (Hx : P x) => Hx) Hy;
- eq_trans (A : *) (x y z : A) (Hxy : eq A x y) (Hyz : eq A y z) : eq A x z := fun (P : A -> *) (Hx : P x) =>
-   Hyz P (Hxy P Hx);
- eq_cong (A B : *) (x y : A) (f : A -> B) (H : eq A x y) : eq B (f x) (f y) :=
-   fun (P : B -> *) (Hfx : P (f x)) =>
-     H (fun (a : A) => P (f a)) Hfx;
+ @include logic.pg

-- --------------------------------------------------------------------------------------------------------------
-- | BASIC DEFINITIONS |


@ -3,67 +3,13 @@
-- for more details on how these work.
--

- false : * := forall (A : *), A;
- false_elim (A : *) (contra : false) : A := contra A;
- not (A : *) : * := A -> false;
- not_intro (A : *) (h : A -> false) : not A := h;
- not_elim (A B : *) (a : A) (na : not A) : B := na a B;
- and (A B : *) : * := forall (C : *), (A -> B -> C) -> C;
- and_intro (A B : *) (a : A) (b : B) : and A B :=
-   fun (C : *) (H : A -> B -> C) => H a b;
- and_elim_l (A B : *) (ab : and A B) : A :=
-   ab A (fun (a : A) (b : B) => a);
- and_elim_r (A B : *) (ab : and A B) : B :=
-   ab B (fun (a : A) (b : B) => b);
- or (A B : *) : * := forall (C : *), (A -> C) -> (B -> C) -> C;
- or_intro_l (A B : *) (a : A) : or A B :=
-   fun (C : *) (ha : A -> C) (hb : B -> C) => ha a;
- or_intro_r (A B : *) (b : B) : or A B :=
-   fun (C : *) (ha : A -> C) (hb : B -> C) => hb b;
- or_elim (A B C : *) (ab : or A B) (ha : A -> C) (hb : B -> C) : C :=
-   ab C ha hb;
- exists (A : *) (P : A -> *) : * := forall (C : *), (forall (x : A), P x -> C) -> C;
- exists_intro (A : *) (P : A -> *) (a : A) (h : P a) : exists A P :=
-   fun (C : *) (g : forall (x : A), P x -> C) => g a h;
- exists_elim (A B : *) (P : A -> *) (ex_a : exists A P) (h : forall (a : A), P a -> B) : B :=
-   ex_a B h;
+ @include logic.pg

binop (A : *) := A -> A -> A;
comp (A B C : *) (g : B -> C) (f : A -> B) (x : A) : C :=
  g (f x);

- -- --------------------------------------------------------------------------------------------------------------
- --
- -- Next we can define equality. This is the same as in <logic.pg>. We get a
- -- couple Peano axioms for free as theorems.
- --
- -- implies axiom 5
- -- (if x : nat, then y : nat, since we can only compare objects of the same type)
- eq (A : *) (x y : A) := forall (P : A -> *), P x -> P y;
- -- axiom 2 (but as a theorem)
- eq_refl (A : *) (x : A) : eq A x x := fun (P : A -> *) (Hx : P x) => Hx;
- -- axiom 3 (but as a theorem)
- eq_sym (A : *) (x y : A) (Hxy : eq A x y) : eq A y x := fun (P : A -> *) (Hy : P y) =>
-   Hxy (fun (z : A) => P z -> P x) (fun (Hx : P x) => Hx) Hy;
- -- axiom 4 (but as a theorem)
- eq_trans (A : *) (x y z : A) (Hxy : eq A x y) (Hyz : eq A y z) : eq A x z := fun (P : A -> *) (Hx : P x) =>
-   Hyz P (Hxy P Hx);
- -- This isn't an axiom, but is handy. If x = y, then f x = f y.
- eq_cong (A B : *) (x y : A) (f : A -> B) (H : eq A x y) : eq B (f x) (f y) :=
-   fun (P : B -> *) (Hfx : P (f x)) =>
-     H (fun (a : A) => P (f a)) Hfx;

assoc (A : *) (op : binop A) := forall (c a b : A),
  eq A (op a (op b c)) (op (op a b) c);


@ -11,14 +11,16 @@ data Error
  | ExpectedPiType Expr Expr
  | NotEquivalent Expr Expr Expr
  | PNMissingType Text
+   | DuplicateDefinition Text
  deriving (Eq, Ord)

instance Show Error where
  show SquareUntyped = "□ does not have a type"
-   show (UnboundVariable x) = "Unbound variable: " ++ T.unpack x
-   show (NotASort x t) = "Expected " ++ prettyS x ++ " to have type * or □, instead found " ++ prettyS t
-   show (ExpectedPiType x t) = prettyS x ++ " : " ++ prettyS t ++ " is not a function"
-   show (NotEquivalent a a' e) = "Cannot unify " ++ prettyS a ++ " with " ++ prettyS a' ++ " when evaluating " ++ prettyS e
-   show (PNMissingType x) = "Primitive Notion " ++ T.unpack x ++ " missing type ascription"
+   show (UnboundVariable x) = "Unbound variable: '" ++ T.unpack x ++ "'"
+   show (NotASort x t) = "Expected '" ++ prettyS x ++ "' to have type * or □, instead found '" ++ prettyS t ++ "'"
+   show (ExpectedPiType x t) = "'" ++ prettyS x ++ "' : '" ++ prettyS t ++ "' is not a function"
+   show (NotEquivalent a a' e) = "Cannot unify '" ++ prettyS a ++ "' with '" ++ prettyS a' ++ "' when evaluating '" ++ prettyS e ++ "'"
+   show (PNMissingType x) = "Axiom '" ++ T.unpack x ++ "' missing type ascription"
+   show (DuplicateDefinition n) = "'" ++ T.unpack n ++ "' already defined"

type Result = Either Error


@ -2,16 +2,27 @@
module Eval where

+ import Control.Monad (void)
import Control.Monad.Except (MonadError (..))
import Control.Monad.Reader
import qualified Data.Map as M
import Data.Text (Text)
+ import qualified Data.Text as T
import Errors
import Expr

data EnvLine = EL {_ty :: Expr, _val :: Expr}

type Env = M.Map Text EnvLine

+ emptyEnv :: Env
+ emptyEnv = M.empty
+
+ showEnvEntry :: Text -> EnvLine -> String
+ showEnvEntry k EL{_ty = t} = T.unpack k ++ " : " ++ prettyS t
+
+ dumpEnv :: Env -> IO ()
+ dumpEnv = void . M.traverseWithKey ((putStrLn .) . showEnvEntry)

-- substitute s for k *AND* decrement indices; only use after reducing a redex.
subst :: Integer -> Expr -> Expr -> Expr
subst k s (Var n x)
@ -40,7 +51,7 @@ whnf (App m n) = do
  if m == m'
    then pure $ App m n
    else whnf $ App m' n
- whnf (Free n) = envLookupVal n
+ whnf (Free n) = envLookupVal n >>= whnf
whnf e = pure e

reduce :: Expr -> ReaderT Env Result Expr
@ -58,29 +69,23 @@ normalize e = do
    then pure e
    else normalize e'

- -- naive beta equivalence check
betaEquiv :: Expr -> Expr -> ReaderT Env Result Bool
- betaEquiv e1 e2 = (==) <$> normalize e1 <*> normalize e2
-
- -- this slightly smarter beta equivalence check is a little buggy,
- -- failing to notice that `add one one` and `two` are beta equivalent in the
- -- example file
-
- -- betaEquiv :: Expr -> Expr -> ReaderT Env Result Bool
- -- betaEquiv e1 e2
- --   | e1 == e2 = pure True
- --   | otherwise = do
- --       e1' <- whnf e1
- --       e2' <- whnf e2
- --       case (e1', e2') of
- --         (Var k1 _, Var k2 _) -> pure $ k1 == k2
- --         (Free n, Free m) -> pure $ n == m
- --         (Free n, e) -> envLookupVal n >>= betaEquiv e
- --         (e, Free n) -> envLookupVal n >>= betaEquiv e
- --         (Star, Star) -> pure True
- --         (Abs _ t1 v1, Abs _ t2 v2) -> (&&) <$> betaEquiv t1 t2 <*> betaEquiv v1 v2 -- i want idiom brackets
- --         (Pi _ t1 v1, Pi _ t2 v2) -> (&&) <$> betaEquiv t1 t2 <*> betaEquiv v1 v2
- --         _ -> pure False -- remaining cases impossible or false
+ betaEquiv e1 e2
+   | e1 == e2 = pure True
+   | otherwise = do
+       e1' <- whnf e1
+       e2' <- whnf e2
+       case (e1', e2') of
+         (Var k1 _, Var k2 _) -> pure $ k1 == k2
+         (Free n, Free m) -> pure $ n == m
+         (Axiom n, Axiom m) -> pure $ n == m
+         (Free n, e) -> envLookupVal n >>= betaEquiv e
+         (e, Free n) -> envLookupVal n >>= betaEquiv e
+         (Star, Star) -> pure True
+         (Abs _ t1 v1, Abs _ t2 v2) -> (&&) <$> betaEquiv t1 t2 <*> betaEquiv v1 v2 -- i want idiom brackets
+         (Pi _ t1 v1, Pi _ t2 v2) -> (&&) <$> betaEquiv t1 t2 <*> betaEquiv v1 v2
+         (App m1 n1, App m2 n2) -> (&&) <$> betaEquiv m1 m2 <*> betaEquiv n1 n2
+         _ -> pure False -- remaining cases impossible or false

checkBeta :: Env -> Expr -> Expr -> Result Bool
checkBeta env e1 e2 = runReaderT (betaEquiv e1 e2) env
@ -91,4 +96,4 @@ isSortPure Square = True
isSortPure _ = False

isSort :: Expr -> ReaderT Env Result (Bool, Expr)
- isSort s = (,s) . isSortPure <$> normalize s
+ isSort s = (,s) . isSortPure <$> whnf s


@ -1,16 +1,11 @@
{-# LANGUAGE NamedFieldPuns #-}

- module Parser (parseProgram, parseDef, parseDefEmpty, parseExpr) where
+ module Parser (parseDef, parseDefEmpty, parseExpr, parseProgram, handleFile) where

import Check
import Control.Monad
- import Control.Monad.State.Strict (
-   MonadState (get, put),
-   State,
-   evalState,
-   modify,
-   runState,
-  )
+ import Control.Monad.Except
+ import Control.Monad.State.Strict
import Data.Bifunctor (first)
import Data.List (elemIndex)
import Data.List.NonEmpty (NonEmpty ((:|)))
@ -21,6 +16,7 @@ import qualified Data.Text as T
import Errors (Error (..))
import Eval
import Expr (Expr (..), incIndices)
+ import Preprocessor
import Text.Megaparsec hiding (State)
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
@ -135,10 +131,15 @@ checkAscription ident value massert = do
updateStateDefinition :: DefinitionLine -> Parser ()
updateStateDefinition DL{_td, _body} = do
+   env <- get
+   let ident = _ident _td
+   when (M.member ident (_env env)) (customFailure $ TE $ DuplicateDefinition ident)
  modify $
    modifyEnv
-       (M.insert (_ident _td) EL{_ty = _type _td, _val = _body})
+       (M.insert ident EL{_ty = _type _td, _val = _body})
updateStateDefinition (PN TD{_type, _ident}) = do
+   env <- get
+   when (M.member _ident (_env env)) (customFailure $ TE $ DuplicateDefinition _ident)
  modify $
    modifyEnv
      (M.insert _ident EL{_ty = _type, _val = Axiom _ident})
@ -203,5 +204,10 @@ parseExpr env input = first errorBundlePretty $ evalState (runParserT pExpr "" i
parseDefEmpty :: Env -> Text -> (Either String (), Env)
parseDefEmpty env input = runState (parseDef input) env

- parseProgram :: Text -> Either String Env
- parseProgram input = first errorBundlePretty $ evalState (runParserT pProgram "" input) IS{_binds = [], _env = M.empty}
+ parseProgram :: Env -> Text -> Either String Env
+ parseProgram initial input = first errorBundlePretty $ evalState (runParserT pProgram "" input) IS{_binds = [], _env = initial}
+
+ handleFile :: Env -> String -> ExceptT String IO Env
+ handleFile initial filename = do
+   text <- show `withExceptT` preprocess filename
+   liftEither $ parseProgram initial text

lib/Preprocessor.hs (new file, 37 lines)

@ -0,0 +1,37 @@
module Preprocessor where

import Control.Monad.Except
import Control.Monad.IO.Class
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import System.FilePath
import System.IO

newtype PreprocessorError = ParseError Text

instance Show PreprocessorError where
  show (ParseError t) = "Preprocessor error on line '" ++ show t ++ "'"

type Result = Either PreprocessorError

type ResultIO = ExceptT PreprocessorError IO

concatMapM :: (Applicative f) => (a -> f Text) -> [a] -> f Text
concatMapM _ [] = pure mempty
concatMapM f (x : xs) = ((<>) . (<> "\n") <$> f x) <*> concatMapM f xs

preprocess :: String -> ResultIO Text
preprocess filename = do
  handle <- liftIO $ openFile filename ReadMode
  text <- liftIO $ T.lines <$> TIO.hGetContents handle
  result <- concatMapM (preprocessLine $ takeDirectory filename) text
  liftIO $ putStrLn $ "loading: " ++ filename
  pure result

parseInclude :: Text -> Result Text
parseInclude line = maybe (Left $ ParseError line) pure $ T.stripPrefix "@include " line

preprocessLine :: FilePath -> Text -> ResultIO Text
preprocessLine base line
  | "@include " `T.isPrefixOf` line = liftEither (parseInclude line) >>= preprocess . normalise . (base </>) . T.unpack
  | otherwise = pure line


@ -34,12 +34,14 @@ library perga-lib
      Expr
      Eval
      Errors
+       Preprocessor
  hs-source-dirs: lib
  build-depends: base ^>=4.19.1.0
    , megaparsec
    , text
    , parser-combinators
+     , filepath
    , mtl
    , containers
  default-language: Haskell2010