三个列表之间关系的归纳顺序

问题描述

我正在研究字符串语法理论,但我完全被一个特定的定理所阻碍。我尝试过的每一个归纳排序最终都陷入了荒谬和无用的归纳假设,我不确定我错过了什么。

我已经多次重读其中的 "Varying the Induction Hypothesis" 一节,想弄清楚自己哪里做错了,但在我看来,我已经遵循了他们关于保持归纳假设足够一般化的建议。

简单地说,我很困惑。非常感谢您的指导!

这是我的定义和难点定理的概述。稍后我会给出我的完整脚本。

(* I plan to make these definitions more complex in the future *)
(* For now a token definition is just the literal string it matches. *)
Definition TokenDefinition := String.string.
Definition Token := String.string.
(* A definition matches a token exactly when the two strings are equal. *)
Definition TokenMatches (def: TokenDefinition) (token: Token): Prop := def = token.

(* A path is a sequence of definitions; a stream is a sequence of tokens. *)
Definition TokenPath := list TokenDefinition.
Definition TokenStream := list Token.

(* [PathMatchesStream path stream] holds when [path] and [stream] are
   non-empty, have the same length, and each definition matches the
   corresponding token. *)
Inductive PathMatchesStream: TokenPath -> TokenStream -> Prop :=
    (* base: a one-element path matches a one-element stream *)
    | PathMatchesStream_base: forall def token,TokenMatches def token
        -> PathMatchesStream [def] [token]
    (* step: extend an existing match by one matching def/token pair *)
    | PathMatchesStream_append: forall def token path stream,TokenMatches def token
        -> PathMatchesStream path stream
        -> PathMatchesStream (def :: path) (token :: stream)
.

(* the problematic theorem *)
Theorem PathMatchesStream_same_if_match_same:
    forall a b stream,PathMatchesStream a stream
        -> PathMatchesStream b stream
        -> a = b.
Proof.
    (* my full script has many failed attempts *)
(* [Qed] is rejected while the proof is unfinished; [Admitted] keeps the
   statement usable as an axiom so the rest of the file still checks. *)
Admitted.

这是我的完整脚本。如果您安装了来自 Certified Programming with Dependent Types (CPDT) 的 CpdtTactics,它应该是可以运行的(我非常喜欢他的自动化风格)。

Set Implicit Arguments. Set Asymmetric Patterns.
Require Import List.
Import ListNotations.
Open Scope list_scope.
Require String.
Require Import Cpdt.CpdtTactics.
Require Import PeanoNat Lt.

(* For now both sides of a match are plain strings. *)
Definition TokenDefinition := String.string.
Definition Token := String.string.

(* A definition matches a token exactly when the two strings are equal. *)
Definition TokenMatches (def: TokenDefinition) (token: Token): Prop := def = token.
Hint Unfold TokenMatches: core.

(* Turn every [TokenMatches] hypothesis into the underlying equality and
   substitute it away. *)
Ltac simpl_TokenMatches :=
    unfold TokenMatches in *; subst.

(* Matching is deterministic: two definitions matching the same token are equal. *)
Theorem TokenDefinition_match_same_then_same:
    forall a b token,TokenMatches a token -> TokenMatches b token -> a = b.
Proof. crush. Qed.


(* A path is a sequence of definitions; a stream is a sequence of tokens. *)
Definition TokenPath := list TokenDefinition.
Definition TokenStream := list Token.

(* [PathMatchesStream path stream] holds when [path] and [stream] are
   non-empty, have the same length, and each definition matches the
   corresponding token.
   NOTE: the base constructor was lost in transcription here — without it
   the remaining constructor refers to the unbound variables [path] and
   [stream] and the whole definition is rejected. Restored to agree with
   the overview given above. *)
Inductive PathMatchesStream: TokenPath -> TokenStream -> Prop :=
    | PathMatchesStream_base: forall def token,TokenMatches def token
        -> PathMatchesStream [def] [token]
    | PathMatchesStream_append: forall def token path stream,TokenMatches def token
        -> PathMatchesStream path stream
        -> PathMatchesStream (def :: path) (token :: stream)
.
Hint Constructors PathMatchesStream: core.

(* Crush the goal, then repeatedly: rewrite away [TokenMatches] hypotheses,
   and invert a [PathMatchesStream] hypothesis only when doing so closes the
   goal outright ([solve] backtracks otherwise). *)
Ltac invert_PathMatchesStream :=
    crush; repeat match goal with
        | [ H : TokenMatches _ _ |- _ ] =>
            simpl_TokenMatches; crush
        | [ H : PathMatchesStream _ _ |- _ ] =>
            solve [inversion H; clear H; crush]
    end.

(* The empty path matches nothing: both constructors build a cons path. *)
Theorem PathMatchesStream_path_not_empty:
    forall stream,~(PathMatchesStream [] stream).
Proof. invert_PathMatchesStream. Qed.
Hint Resolve PathMatchesStream_path_not_empty: core.

(* Dually, nothing matches the empty stream. *)
Theorem PathMatchesStream_stream_not_empty:
    forall path,~(PathMatchesStream path []).
Proof. invert_PathMatchesStream. Qed.
Hint Resolve PathMatchesStream_stream_not_empty: core.

(* A match forces both the path and the stream to be non-empty. *)
Theorem PathMatchesStream_length_non_zero:
    forall path stream,PathMatchesStream path stream -> 0 < (length path) /\ 0 < (length stream).
Proof. invert_PathMatchesStream. Qed.
Hint Resolve PathMatchesStream_length_non_zero: core.

Theorem PathMatchesStream_same_if_match_same:
    forall a b stream,PathMatchesStream a stream
        -> PathMatchesStream b stream
        -> a = b.
Proof.
    (* The failed nested inductions are unnecessary. Since [TokenMatches]
       unfolds to equality, a single induction on the match derivation shows
       that any matching path IS the stream it matches; both paths therefore
       equal the shared stream, hence each other. *)
    assert (aux: forall path stream,
        PathMatchesStream path stream -> path = stream).
    { intros path stream H.
      induction H; unfold TokenMatches in *; subst; reflexivity. }
    intros a b stream Ha Hb.
    rewrite (aux _ _ Ha), (aux _ _ Hb); reflexivity.
Qed.

解决方法

对于这个定理,你实际上不需要任何太花哨的东西。只是一个辅助引理:

Require Import Coq.Lists.List.
Require Import Coq.Strings.String.
Import ListNotations.


(* I plan to make these definitions more complex in the future *)
(* For now a token definition is just the literal string it matches, and a
   definition matches a token exactly when the two strings are equal. *)
Definition TokenDefinition := String.string.
Definition Token := String.string.
Definition TokenMatches (def: TokenDefinition) (token: Token): Prop := def = token.

(* A path is a sequence of definitions; a stream is a sequence of tokens. *)
Definition TokenPath := list TokenDefinition.
Definition TokenStream := list Token.

(* [PathMatchesStream path stream]: [path] and [stream] are non-empty, have
   the same length, and match pointwise. *)
Inductive PathMatchesStream: TokenPath -> TokenStream -> Prop :=
    (* base: singleton path matches singleton stream *)
    | PathMatchesStream_base: forall def token,TokenMatches def token
        -> PathMatchesStream [def] [token]
    (* step: extend a match by one matching def/token pair *)
    | PathMatchesStream_append: forall def token path stream,TokenMatches def token
        -> PathMatchesStream path stream
        -> PathMatchesStream (def :: path) (token :: stream)
.

(* Because [TokenMatches] unfolds to equality, a match forces the path to be
   literally the stream it matches. *)
Theorem PathMatchesStream_same_if_match_same_aux :
    forall a stream,PathMatchesStream a stream -> a = stream.
Proof.
intros path stream H.
induction H; unfold TokenMatches in *; subst; reflexivity.
Qed.

(* the problematic theorem *)
(* Both paths equal the shared stream by the auxiliary lemma, hence each other. *)
Theorem PathMatchesStream_same_if_match_same:
    forall a b stream,PathMatchesStream a stream
        -> PathMatchesStream b stream
        -> a = b.
Proof.
intros a b stream Ha Hb.
apply PathMatchesStream_same_if_match_same_aux in Ha.
apply PathMatchesStream_same_if_match_same_aux in Hb.
now subst.
Qed.

我不知道这是否仍然足以满足您所想到的改进,不过...

相关问答

错误1:Request method ‘DELETE‘ not supported 错误还原:...
错误1:启动docker镜像时报错:Error response from daemon:...
错误1:private field ‘xxx‘ is never assigned 按Alt...
报错如下,通过源不能下载,最后警告pip需升级版本 Requirem...