chapter ‹Appendix IV: Examples for Annotation Navigation and Context Serialization›
theory C2
imports "../main/C_Main"
"HOL-ex.Cartouche_Examples"
begin
text ‹ Operationally, the ⬚‹C› command can be thought of as behaving like the ⬚‹ML› command,
where it is for example possible to recursively nest C code in C. Generally, the present
chapter assumes familiarity with all advanced concepts of ML as described in
🗏‹~~/src/HOL/Examples/ML.thy›, as well as with the concept of ML antiquotations
(🗏‹~~/src/Doc/Implementation/ML.thy›). However, even if ⬚‹C› might resemble ⬚‹ML›,
we will now see in detail that there are actually subtle differences between the two commands.›
section ‹Setup of ML Antiquotations Displaying the Environment (For Debugging)›
ML‹
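(* Debugging helpers, wrapped as ML antiquotations further below: ‹print_top› prints the value on
   top of the parser stack, ‹print_top'› prints the current parsing environment, while
   ‹print_stack› and ‹print_stack'› additionally output a SHIFT message together with the whole
   stack (respectively the environment). *)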
fun print_top make_string f _ (_, (value, _, _)) _ = tap (fn _ => writeln (make_string value)) o f
fun print_top' _ f _ _ env = tap (fn _ => writeln ("ENV " ^ C_Env.string_of env)) o f
fun print_stack s make_string stack _ _ thy =
  let
    val () = Output.information ("SHIFT " ^ (case s of NONE => "" | SOME s => "\"" ^ s ^ "\" ")
                                 ^ Int.toString (length stack - 1) ^ " +1 ")
    val () = stack
          |> split_list
          |> #2
          |> map_index I
          |> app (fn (i, (value, pos1, pos2)) =>
               writeln (" " ^ Int.toString (length stack - i) ^ " " ^ make_string value
                        ^ " " ^ Position.here pos1 ^ " " ^ Position.here pos2))
  in thy end

fun print_stack' s _ stack _ env thy =
  let
    val () = Output.information ("SHIFT " ^ (case s of NONE => "" | SOME s => "\"" ^ s ^ "\" ")
                                 ^ Int.toString (length stack - 1) ^ " +1 ")
    val () = writeln ("ENV " ^ C_Env.string_of env)
  in thy end
›
setup ‹ML_Antiquotation.inline @{binding print_top}
        (Args.context
         >> K ("print_top " ^ ML_Pretty.make_string_fn ^ " I"))›
setup ‹ML_Antiquotation.inline @{binding print_top'}
        (Args.context
         >> K ("print_top' " ^ ML_Pretty.make_string_fn ^ " I"))›
setup ‹ML_Antiquotation.inline @{binding print_stack}
        (Scan.peek (fn _ => Scan.option Parse.embedded)
         >> (fn name => ("print_stack "
                         ^ (case name of NONE => "NONE"
                                       | SOME s => "(SOME \"" ^ s ^ "\")")
                         ^ " " ^ ML_Pretty.make_string_fn)))›
setup ‹ML_Antiquotation.inline @{binding print_stack'}
        (Scan.peek (fn _ => Scan.option Parse.embedded)
         >> (fn name => ("print_stack' "
                         ^ (case name of NONE => "NONE"
                                       | SOME s => "(SOME \"" ^ s ^ "\")")
                         ^ " " ^ ML_Pretty.make_string_fn)))›
declare[[C_lexer_trace]]
section ‹Introduction to C Annotations: Navigating in the Parsing Stack›
subsection ‹Basics›
text ‹ Since the present theory 🗏‹C2.thy› depends on
\<^theory>‹Isabelle_C.C_Lexer_Language› and
\<^theory>‹Isabelle_C.C_Parser_Language›, the syntax accepted by the
⬚‹C› command is C11. Additionally, 🗏‹C2.thy› also
depends on \<^theory>‹Isabelle_C.C_Parser_Annotation›, making it possible to write
commands inside C comments, called annotation commands, such as
⬚‹≈setup›. ›
C ‹
int a = (((0))); /*@@ highlight */
/*@ ≈setup ‹@{print_stack}› */
/*@ ≈setup ‹@{print_top}› */
›
text ‹ In terms of execution order, nested annotation commands are not pre-filtered out of the
C code, but executed while the C code is still being parsed. Since the parser implemented is a LALR
parser ⁋‹🌐‹https://en.wikipedia.org/wiki/LALR››, C tokens
are uniquely read and treated from left to right. Thus, by default, each nested command is
executed once the parser has already read all C tokens preceding the comment associated with the
nested command, that is, when the parser is in some intermediate parsing state (not necessarily
a final one)
⁋‹🌐‹https://en.wikipedia.org/wiki/Shift-reduce_parser››. ›
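text ‹ As a small additional sketch reusing the ‹print_stack› antiquotation set up above, the two
annotations below are expected to report stacks of different sizes, since the second one is only
reached after more C tokens have been shifted: ›
C ‹
int a = 0 /*@ ≈setup ‹@{print_stack}› */ + 1 /*@ ≈setup ‹@{print_stack}› */;
›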
text ‹The command ⬚‹≈setup› is similar to the command
⬚‹setup› except that the former takes a function with additional
arguments. These arguments depend precisely on the current parsing state. To better examine
these arguments, it is convenient to use ML antiquotations (be it for printing, or for any
regular ML action such as PIDE reporting).
Note that, in contrast with ⬚‹setup›, the return type of the
⬚‹≈setup› function is not
\<^ML_type>‹theory -> theory› but
\<^ML_type>‹Context.generic -> Context.generic›. ›
C ‹
int a = (((0
+ 5))) /*@@ ≈setup ‹print_top @{make_string} I›
@ highlight
*/
* 4;
float b = 7 / 3;
›
text ‹The special ‹@› symbol makes the command execute whenever the first element ‹E›
of the stack is about to be irremediably replaced by a more structured parent element (having ‹E›
as one of its direct children). It is this parent element which is provided to the ML code.
Instead of always referring to the first element of the stack,
‹N› consecutive occurrences of ‹@› make the ML code receive as argument the direct parent
of the ‹N›-th element.›
C ‹
int a = (((0 + 5))) /*@@ highlight */
* 4;
int a = (((0 + 5))) /*@& highlight */
* 4;
int a = (((0 + 5))) /*@@@@@ highlight */
* 4;
int a = (((0 + 5))) /*@&&&& highlight */
* 4;
›
text ‹‹&› behaves like ‹@›, but instead of always giving the designated direct parent to the ML code,
it finds the first ancestor making non-trivial changes in the respective grammar rule
(a non-trivial change being, for example, the registration of the position of the current AST node
being built).›
C ‹
int b = 7 / (3) * 50;
/*@+++@@ highlight */
long long f (int a) {
while (0) { return 0; }
}
int b = 7 / (3) * 50;
›
text ‹‹N› consecutive occurrences of ‹+› delay the interpretation of the comment,
which is ignored at the place where it is written. The comment is only really taken into account
after the C parser has treated ‹N› more tokens.›
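text ‹ For instance, in the following small sketch the ‹highlight› command is prefixed by one ‹+›,
so it is ignored where it is written and only taken into account after one more C token has been
read by the parser: ›
C ‹
int b = 7 /*@ +@ highlight */ / 3 * 2;
›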
C ‹
int a = (((0 //@ ≈setup ‹fn _ => fn _ => fn _ => fn context => let in context end›
/*@ ≈setup ‹(K o K o K) I› */
)));
›
C ‹
/*@ ≈setup‹(K o K o K) (fn x => K x @{con\
text (**)})› */
int a = 0; //\
@ ≈setup‹(K o K o K) (fn x => K x @{term ‹a \
+ b› (* (**) *\
\
)})›
›
subsection ‹Erroneous Annotations Treated as Regular C Comments›
C ‹
int a = 0;
/** ≈setup ‹fn _ => fn _ => fn _ => I›
*/
›
C ‹
int a = 0;
/*@ ≈setup ‹fn _ => fn _ => fn _ => I›
≈setup ‹fn _ => fn _ => fn _ => I›
*/
/** ≈setup ‹fn _ => fn _ => fn _ => I›
≈setup ‹fn _ => fn _ => fn _ => I›
*/
›
subsection ‹Bottom-Up vs. Top-Down Evaluation›
ML‹
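(* ‹Example_Data› accumulates strings in the generic context; ‹add_ex s1 s2› registers ‹s2› and
   prints the content accumulated so far, making the evaluation order of annotation commands
   observable. *)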
structure Example_Data = Generic_Data
  (type T = string list
   val empty = [] val extend = I val merge = K empty)

fun add_ex s1 s2 =
  Example_Data.map (cons s2)
  #> (fn context => let val () = Output.information (s1 ^ s2)
                        val () = app (fn s => writeln (" Data content: " ^ s))
                                     (Example_Data.get context)
                    in context end)
›
setup ‹Context.theory_map (Example_Data.put [])›
declare[[ML_source_trace]]
declare[[C_parser_trace]]
C ‹
int b,c,d/*@@ ≈setup ‹fn s => fn x => fn env => @{print_top} s x env
#> add_ex "evaluation of " "3_print_top"›
*/,e = 0; /*@@
≈setup ‹fn s => fn x => fn env => @{print_top} s x env
#> add_ex "evaluation of " "4_print_top"› */
int b,c,d/*@@ ≈setup⇓ ‹fn s => fn x => fn env => @{print_top} s x env
#> add_ex "evaluation of " "6_print_top"›
*/,e = 0; /*@@
≈setup⇓ ‹fn s => fn x => fn env => @{print_top} s x env
#> add_ex "evaluation of " "5_print_top"› */
›
subsection ‹Out of Bound Evaluation for Annotations›
C ‹
int a = 0 ;
int /*@ @ ML ‹writeln "2"›
@@@ ML ‹writeln "4"›
+@ ML ‹writeln "3"›
ML⇓‹writeln "1"› */
int a;
›
C ‹
int a = 0 /*@ ML‹writeln "1"› */;
int /*@ @@@@@ML‹writeln "5" › @@@ML‹writeln "4" › @@ML‹writeln "2" › */
/*@ @@@@@ML‹writeln "5'"› @@@ML‹writeln "4'"› @@ML‹writeln "2'"› */
a = 0;
int d = 0; /*@ ML‹writeln "3"› */
›
C ‹
int a = 0 /*@ ++@@@@ML‹writeln "2"›
++@@@ ML‹writeln "1"› */;
›
section ‹Reporting of Positions and Contextual Update of Environment›
text ‹
To show the content of the parsing environment, the ML antiquotations ‹print_top'› and ‹print_stack'›
will be used respectively instead of ‹print_top› and ‹print_stack›.
This example suite makes it possible to explore the bindings represented in the C environment
and made accessible in PIDE for hovering. ›
subsection ‹Reporting: ‹typedef›, ‹enum››
declare [[ML_source_trace = false]]
declare [[C_lexer_trace = false]]
C ‹
typedef int i, j;
/*@@ ≈setup ‹@{print_top'}› @highlight */ //@ +++++@ ≈setup ‹@{print_top'}› +++++@highlight
int j = 0;
typedef int i, j;
j jj1 = 0;
j jj = jj1;
j j = jj1 + jj;
typedef i j;
typedef i j;
typedef i j;
i jj = jj;
j j = jj;
›
C ‹
typedef int j;
j a = 0;
typedef int k;
int main (int c) {
j b = 0;
typedef int k;
typedef k l;
k a = c;
l a = 0;
}
k a = a;
›
C ‹
enum a b;
enum a {aaa};
enum a {aaa};
enum a _;
__thread (f ( enum a, enum a vv));
enum a f (enum a a) {
}
__thread enum a f (enum a a) {
enum c {ccc};
__thread enum c f (enum c a) {
return 0;
}
enum c f (enum c a) {
return 0;
}
return 0;
}
enum z {zz};
int main (enum z *x) {
return zz; }
int main (enum a *x, ...) {
return zz; }
›
subsection ‹Continuation Calculus with the C Environment: Presentation in ML›
declare [[C_parser_trace = false]]
ML‹
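(* Shorthands for evaluating nested C code from ML: ‹C› parses its argument without an explicitly
   given environment, whereas ‹C'› additionally propagates a ‹C_Env.env_lang› provided as first
   argument. *)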
val C = C_Module.C
val C' = C_Module.C' o SOME
›
C ‹
int a = 0;
int b = 7 / (3) * 50
/*@@@@@ ≈setup ‹fn _ => fn _ => fn _ =>
C ‹int b = a + a + a + a + a + a + a
;› › */;
›
C ‹
int a = 0;
int b = 7 / (3) * 50
/*@@@@@ ≈setup ‹fn _ => fn _ => fn env =>
C' env ‹int b = a + a + a + a + a + a + a
;› › */;
›
subsection ‹Continuation Calculus with the C Environment: Presentation with Outer Commands›
ML‹
val _ = Theory.setup
(C_Inner_Syntax.command0
(fn src => fn context => C' (C_Stack.Data_Lang.get' context |> #2) src context)
C_Parse.C_source
("C'", ⌂, ⌂, ⌂))
›
C ‹
int f (int a) {
int b = 7 / (3) * 50 /*@ C ‹int b = a + a + a + a + a + a + a;› */;
int c = b + a + a + a + a + a + a;
} ›
C ‹
int f (int a) {
int b = 7 / (3) * 50 /*@ C' ‹int b = a + a + a + a + a + a + a;› */;
int c = b + b + b + b + a + a + a + a + a + a;
} ›
C ‹
int f (int a) {
int b = 7 / (3) * 50 /*@ C ‹int b = a + a + a + a + a; //@ C' ‹int c = b + b + b + b + a;› › */;
int b = 7 / (3) * 50 /*@ C' ‹int b = a + a + a + a + a; //@ C' ‹int c = b + b + b + b + a;› › */;
int c = b + b + b + b + a + a + a + a + a + a;
} ›
subsection ‹Continuation Calculus with the C Environment: Depth-First Nesting vs. Breadth-First Folding: Propagation of \<^ML_type>‹C_Env.env_lang››
C
‹
//@ ML ‹fun C_env src _ _ env = C' env src›
int a;
int f (int b) {
int c = 0; /*@ ≈setup ‹fn _ => fn _ => fn env =>
C' env ‹int d = a + b + c + d; //@ ≈setup ‹C_env ‹int e = a + b + c + d;›››
#> C ‹int d = a + b + c + d; //@ ≈setup ‹C_env ‹int e = a + b + c + d;›››
#> C' env ‹int d = a + b + c + d; //@ ≈setup ‹C_env ‹int e = a + b + c + d;›››
#> C ‹int d = a + b + c + d; //@ ≈setup ‹C_env ‹int e = a + b + c + d;›››
› */
int e = a + b + c + d;
}›
C ‹
#undef int
#define int(a,b) int
#define int int
int a;
int f (int b) {
int c = 0; /*@ ≈setup ‹fn _ => fn _ => fn env =>
C' env ‹int d = a + b + c + d; //@ ≈setup ‹C_env ‹int e = a + b + c + d;›››
#> C ‹int d = a + b + c + d; //@ ≈setup ‹C_env ‹int e = a + b + c + d;›››
#> C' env ‹int d = a + b + c + d; //@ ≈setup ‹C_env ‹int e = a + b + c + d;›››
#> C ‹int d = a + b + c + d; //@ ≈setup ‹C_env ‹int e = a + b + c + d;›››
› */
#undef int
int e = a + b + c + d;
}
›
subsection ‹Continuation Calculus with the C Environment: Depth-First Nesting vs. Breadth-First Folding: Propagation of \<^ML_type>‹C_Env.env_tree››
ML‹
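(* ‹Data_Out› stores a counter in the generic context; it is incremented each time the
   ‹C_Module.Data_Accept› hook (installed right below) fires, and ‹show_env› prints its current
   value together with a tag message. *)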
structure Data_Out = Generic_Data
  (type T = int
   val empty = 0
   val extend = I
   val merge = K empty)

fun show_env0 make_string f msg context =
  Output.information ("(" ^ msg ^ ") " ^ make_string (f (Data_Out.get context)))

val show_env = tap o show_env0 @{make_string} I
›
setup ‹Context.theory_map (C_Module.Data_Accept.put (fn _ => fn _ => Data_Out.map (fn x => x + 1)))›
C ‹
typedef int i, j;
int j = 0;
typedef int i, j;
j jj1 = 0;
j jj = jj1; /*@@ ≈setup ‹fn _ => fn _ => fn _ => show_env "POSITION 0"› @≈setup ‹@{print_top'}› */
typedef int k; /*@@ ≈setup ‹fn _ => fn _ => fn env =>
C' env ‹k jj = jj; //@@ ≈setup ‹@{print_top'}›
k jj = jj + jj1;
typedef k l; //@@ ≈setup ‹@{print_top'}››
#> show_env "POSITION 1"› */
j j = jj1 + jj; //@@ ≈setup ‹@{print_top'}›
typedef i j; /*@@ ≈setup ‹fn _ => fn _ => fn _ => show_env "POSITION 2"› */
typedef i j;
typedef i j;
i jj = jj;
j j = jj;
›
ML‹show_env "POSITION 3" (Context.Theory @{theory})›
setup ‹Context.theory_map (C_Module.Data_Accept.put (fn _ => fn _ => I))›
subsection ‹Reporting: Scope of Recursive Functions›
declare [[C⇩e⇩n⇩v⇩0 = last]]
C ‹
int a = 0;
int b = a * a + 0;
int jjj = b;
int main (void main(int *x,int *y),int *jjj) {
return a + jjj + main(); }
int main2 () {
int main3 () { main2() + main(); }
int main () { main2() + main(); }
return a + jjj + main3() + main(); }
›
C ‹
int main3 () { main2 (); }
›
declare [[C⇩e⇩n⇩v⇩0 = empty]]
subsection ‹Reporting: Extensions to Function Types, Array Types›
C ‹int f (int z);›
C ‹int * f (int z);›
C ‹int (* f) (int z );›
C ‹typedef int (* f) (int z);›
C ‹int f (int z) {}›
C ‹int * f (int z) {return z;}›
C ‹int ((* f) (int z1, int z2)) {return z1 + z2;}›
C ‹int (* (* f) (int z1, int z2)) {return z1 + z2;}›
C ‹typedef int (* f) (int z); f uuu (int b) {return b;};›
C ‹typedef int (* (* f) (int z, int z)) (int a); f uuu (int b) {return b;};›
C ‹struct z { int (* f) (int z); int (* (* ff) (int z)) (int a); };›
C ‹double (* (* f (int a )) (int a, double d)) (char a);›
C ‹double (* (((* f) []) (int a)) (int b, double c)) (char d) {int a = b + c + d;}›
C ‹double ((*((f) (int a))) (int a , double)) (char c) {int a = 0;}›
C ‹
double (* (* f (int a)) (int a, double)) (char c) {
double (* (* f (int a)) (double a, int a)) (char) {
return a;
}
}
›
C ‹
f (x) int x; {return x;}
›
section ‹General Isar Commands›
locale zz begin definition "z' = ()"
end
C ‹
int a = 0;
int b = a * a + 0;
int jjj = b;
/*@
@@@ ML ‹@{lemma ‹A ∧ B ⟶ B ∧ A› by (ml_tactic ‹blast_tac ctxt 1›)}›
definition "a' = ()"
declare [[ML_source_trace]]
lemma (in zz) ‹A ∧ B ⟶ B ∧ A› by (ml_tactic ‹blast_tac ctxt 1›)
definition (in zz) "z = ()"
corollary "zz.z' = ()"
apply (unfold zz.z'_def)
by blast
theorem "True &&& True" by (auto, presburger?)
*/
›
declare [[ML_source_trace = false]]
C ‹
//@ lem\
ma (i\
n z\
z) \
‹\
AA ∧ B\
⟶\
B ∧ A\
\
A› b\
y (ml_t\
actic ‹\
bla\
st_tac c\
txt\
0\
001›)
›
section ‹Starting Parsing Rule›
subsection ‹Basics›
C ‹
/*@
declare [[C⇩r⇩u⇩l⇩e⇩0 = "statement"]]
C ‹while (a) {}›
C ‹a = 2;›
declare [[C⇩r⇩u⇩l⇩e⇩0 = "expression"]]
C ‹2 + 3›
C ‹a = 2›
C ‹a[1]›
C ‹&a›
C ‹a›
*/
›
subsection ‹Embedding in Inner Terms›
term ‹\<^C> ‹0››
term ‹\<^C>⇩u⇩n⇩i⇩t ‹f () {while (a) {}; return 0;} int a = 0;››
term ‹\<^C>⇩d⇩e⇩c⇩l ‹int a = 0; ››
term ‹\<^C>⇩e⇩x⇩p⇩r ‹a››
term ‹\<^C>⇩s⇩t⇩m⇩t ‹while (a) {}››
declare [[C⇩r⇩u⇩l⇩e⇩0 = "translation_unit"]]
term ‹\<^C> ‹int a = 0;››
subsection ‹User Defined Setup of Syntax›
setup ‹C_Module.C_Term.map_expression (fn _ => fn _ => fn _ => @{term "10 :: nat"})›
setup ‹C_Module.C_Term.map_statement (fn _ => fn _ => fn _ => @{term "20 :: nat"})›
value ‹\<^C>⇩e⇩x⇩p⇩r‹1› + \<^C>⇩s⇩t⇩m⇩t‹for (;;);››
setup ‹C_Module.C_Term.map_expression
(fn _ => fn _ => fn _ => @{term "1000 :: nat"})›
value ‹\<^C>⇩e⇩x⇩p⇩r‹1› + \<^C>⇩s⇩t⇩m⇩t‹for (;;);››
setup ‹C_Module.C_Term.map_default (fn _ => fn _ => fn _ => @{term "True"})›
subsection ‹Validity of Context for Annotations›
ML ‹fun fac x = if x = 0 then 1 else x * fac (x - 1)›
ML
‹
\<^term>‹ \<^C> ‹int c = 0; /*@ ML ‹fac 100› */› ›
›
definition
‹
term = \<^C> ‹int c = 0; /*@ ML ‹fac 100› */›
›
section ‹Scopes of Inner and Outer Terms›
ML ‹
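(* Defines two annotation commands used in the examples below: ‹term⇩i⇩n⇩n⇩e⇩r›, which reads its
   cartouche argument as an inner term in a bottom-up fashion, and ‹term⇩o⇩u⇩t⇩e⇩r›, which prints
   the term given as argument using the outer context. *)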
local
  fun bind scan ((stack1, (to_delay, stack2)), _) =
    C_Parse.range scan
    >> (fn (src, range) =>
         C_Env.Parsing
           ( (stack1, stack2)
           , ( range
             , C_Inner_Syntax.bottom_up
                 (fn _ => fn context =>
                   ML_Context.exec
                     (tap (fn _ => Syntax.read_term (Context.proof_of context)
                                                    (Token.inner_syntax_of src)))
                     context)
             , Symtab.empty
             , to_delay)))
in
val _ =
  Theory.setup
    ( C_Annotation.command'
        ("term⇩i⇩n⇩n⇩e⇩r", ⌂)
        ""
        (bind (C_Token.syntax' (Parse.token Parse.cartouche)))
    #> C_Inner_Syntax.command0
        (C_Inner_Toplevel.keep'' o C_Inner_Isar_Cmd.print_term)
        (C_Token.syntax' (Scan.succeed [] -- Parse.term))
        ("term⇩o⇩u⇩t⇩e⇩r", ⌂, ⌂, ⌂))
end
›
C ‹
int z = z;
/*@ C ‹//@ term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›››
C' ‹//@ term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›››
term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z››
C ‹//@ term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›››
C' ‹//@ term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›››
term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›› */›
term ‹\<^C>⇩e⇩x⇩p⇩r‹z››
C ‹
int z = z;
/*@ C ‹//@ term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›››
C' ‹//@ term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›››
term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z››
C ‹//@ term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›››
C' ‹//@ term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›››
term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›› */›
term ‹\<^C>⇩e⇩x⇩p⇩r‹z››
declare [[C⇩e⇩n⇩v⇩0 = last]]
C ‹
int z = z;
/*@ C ‹//@ term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›››
C' ‹//@ term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›››
term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z››
C ‹//@ term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›››
C' ‹//@ term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›››
term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹z›› */›
term ‹\<^C>⇩e⇩x⇩p⇩r‹z››
declare [[C⇩e⇩n⇩v⇩0 = empty]]
C ‹
int a;
int f (int b) {
int c = 0;
/*@ ≈setup ‹fn _ => fn _ => fn env =>
C' env ‹int d = a + b + c + d; //@ term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹c› + \<^C>⇩e⇩x⇩p⇩r‹d›› term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹c› + \<^C>⇩e⇩x⇩p⇩r‹d›››
#> C ‹int d = a + b + c + d; //@ term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹c› + \<^C>⇩e⇩x⇩p⇩r‹d›› term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹c› + \<^C>⇩e⇩x⇩p⇩r‹d›››
#> C' env ‹int d = a + b + c + d; //@ term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹c› + \<^C>⇩e⇩x⇩p⇩r‹d›› term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹c› + \<^C>⇩e⇩x⇩p⇩r‹d›››
#> C ‹int d = a + b + c + d; //@ term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹c› + \<^C>⇩e⇩x⇩p⇩r‹d›› term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹c› + \<^C>⇩e⇩x⇩p⇩r‹d›››
›
term⇩i⇩n⇩n⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹c› + \<^C>⇩e⇩x⇩p⇩r‹d››
term⇩o⇩u⇩t⇩e⇩r ‹\<^C>⇩e⇩x⇩p⇩r‹c› + \<^C>⇩e⇩x⇩p⇩r‹d›› */
int e = a + b + c + d;
}›
section ‹Calculation in Directives›
subsection ‹Annotation Command Classification›
C ‹
int a = 0;
//@ ≈setup ‹K (K (K I))›
//@@ ≈setup⇓ ‹K (K (K I))›
//@ highlight
//@@ highlight⇓
//@ #setup I
//@ setup I
//@@ setup⇓ I
//@ #ML I
//@ ML I
//@@ ML⇓ I
//@ #C ‹›
//@ C ‹›
//@@ C⇓ ‹›
›
C ‹
//@+++++ ML ‹writeln "2"›
int a = 0;
//@@ ML⇓ ‹writeln "3"›
//@ #ML ‹writeln "1"›
›
C ‹
//* lemma True by simp
//* #lemma True #by simp
›
C ‹ /*@
lemma ‹1 = one›
‹2 = two›
‹two + one = three›
by auto
#definition [simp]: ‹three = 3›
#definition [simp]: ‹two = 2›
#definition [simp]: ‹one = 1›
*/ ›
subsection ‹Generalizing ML Antiquotations with C Directives›
ML ‹
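(* Wrappers around ‹C_Directive.setup_define›, used below to hook user ML code into the treatment
   of ‹#define› directives; ‹Directive_setup_define› stores a counter in the generic context for
   these examples. *)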
structure Directive_setup_define = Generic_Data
  (type T = int
   val empty = 0
   val extend = I
   val merge = K empty)

fun setup_define1 pos f =
  C_Directive.setup_define
    pos
    (fn toks => fn (name, (pos1, _)) =>
      tap (fn _ => writeln ("Executing " ^ name ^ Position.here pos1 ^ " (only once)"))
      #> pair (f toks))
    (K I)

fun setup_define2 pos = C_Directive.setup_define pos (K o pair)
›
C ‹
/*@
#setup ‹
setup_define2
⌂
(fn (name, (pos1, _)) =>
op ` Directive_setup_define.get
#>> (case name of "f3" => curry op * 152263 | _ => curry op + 1)
#> tap (fn (nb, _) =>
tracing ("Executing antiquotation " ^ name ^ Position.here pos1
^ " (number = " ^ Int.toString nb ^ ")"))
#> uncurry Directive_setup_define.put)
›
*/
#define f1
#define f2 int a = 0;
#define f3
f1
f2
f1
f3
//@ #setup ‹setup_define2 ⌂ (K I)›
f3
#define f3
f3
›
C ‹
//@ #setup ‹setup_define1 ⌂ (K [])›
#define f int a = 0;
f f f f
//@ #setup ‹setup_define1 ⌂ (fn toks => toks @ toks)›
#define f int b = a;
f f
//@ #setup ‹setup_define1 ⌂ I›
#define f int a = 0;
f f
›
section ‹Miscellaneous›
C ‹
# include <a\b\\c>
f(){0 + 0;}
# if if elif
#include <stdio.h>
if then else ;
# elif
#else\
#define FOO 00 0 "" ((
FOO(FOO(a,b,c))
#endif›
C ‹
#define F <stdio.h>
#define G "stdio\h"
#define H "stdio_h"
int f = "";
int g = "";
int h = H "";
#include F
›
C ‹ \
\
# define \
a
a a
›
C ‹
#include <stdio.h>
int main () {
char * _ = "\x00001";
char *  _  = " ";
char * ó🌐ò = "ó🌐ò";
printf ("%s %s", ó🌐ò, _ );
}
›
ML‹ C_Parse.ML_source ›
declare[[C⇩e⇩n⇩v⇩0 = last]]
ML‹@{C⇩e⇩n⇩v}›
ML‹C_Stack.Data_Lang.get' :
Context.generic ->
(LALR_Table.state, C_Grammar_Rule.svalue0, Position.T) C_Env.stack_elem0 list * C_Env.env_lang;
C_Parse.C_source: Input.source C_Parse.parser ;
C_Inner_Syntax.command0 ;
C';
C;
›
declare [[C⇩r⇩u⇩l⇩e⇩0 = "expression"]]
ML‹
val src = ‹a + b›;
val ctxt = (Context.Proof @{context});
val ctxt' = C' @{C⇩e⇩n⇩v} src ctxt;
C_Module.Data_In_Env.get ctxt'
›
ML‹val _ = @{term ‹3::nat›}›
ML‹ ML_Antiquotation.inline_embedded;
›
declare [[C⇩r⇩u⇩l⇩e⇩0 = "translation_unit"]]
end