diff --git a/data/real_cases/arabidopsis.aeon b/data/real_cases/arabidopsis.aeon new file mode 100644 index 0000000..6137b7a --- /dev/null +++ b/data/real_cases/arabidopsis.aeon @@ -0,0 +1,37 @@ +## PROPERTIES +#! dynamic_property: fp1: #`(3{x}: (@{x}: (AGO1 & ~AGO10 & ~AGO7 & ANT & ARF4 & ~AS1 & ~AS2 & ETT & FIL & KAN1 & miR165 & miR390 & ~REV & ~TAS3siRNA & AGO1_miR165 & ~AGO7_miR390 & ~AS1_AS2 & AUXINh & ~CKh & ~GTE6 & ~IPT5 & (AX (AGO1 & ~AGO10 & ~AGO7 & ANT & ARF4 & ~AS1 & ~AS2 & ETT & FIL & KAN1 & miR165 & miR390 & ~REV & ~TAS3siRNA & AGO1_miR165 & ~AGO7_miR390 & ~AS1_AS2 & AUXINh & ~CKh & ~GTE6 & ~IPT5)))))`# +#! dynamic_property: fp2: #`(3{x}: (@{x}: (~AGO1 & AGO10 & AGO7 & ANT & ~ARF4 & AS1 & AS2 & ~ETT & ~FIL & ~KAN1 & ~miR165 & miR390 & REV & TAS3siRNA & ~AGO1_miR165 & AGO7_miR390 & AS1_AS2 & AUXINh & CKh & GTE6 & IPT5 & (AX (~AGO1 & AGO10 & AGO7 & ANT & ~ARF4 & AS1 & AS2 & ~ETT & ~FIL & ~KAN1 & ~miR165 & miR390 & REV & TAS3siRNA & ~AGO1_miR165 & AGO7_miR390 & AS1_AS2 & AUXINh & CKh & GTE6 & IPT5)))))`# + +## MODEL +AUXINh ->? AUXINh +AUXINh ->? miR390 +miR390 -> AGO7_miR390 +AGO7 -> AGO7_miR390 +REV ->? AGO7 +AUXINh ->? REV +AGO1_miR165 -| REV +AUXINh ->? ANT +AUXINh ->? ARF4 +FIL ->? ARF4 +TAS3siRNA -| ARF4 +ANT ->? FIL +ARF4 ->? FIL +ETT ->? FIL +AGO7_miR390 -> TAS3siRNA +FIL ->? ETT +TAS3siRNA -| ETT +FIL ->? KAN1 +AS1_AS2 -|? KAN1 +AS2 -> AS1_AS2 +AS1 -> AS1_AS2 +miR165 -> AGO1_miR165 +AGO1 -> AGO1_miR165 +KAN1 -| AS2 +GTE6 -> AS1 +CKh ->? GTE6 +TAS3siRNA -|? miR165 +AS1_AS2 -|? miR165 +IPT5 -> CKh +REV ->? IPT5 +AGO10 -|? AGO1 +REV ->? AGO10 \ No newline at end of file diff --git a/data/real_cases/arabidopsis.json b/data/real_cases/arabidopsis.json new file mode 100644 index 0000000..aca8033 --- /dev/null +++ b/data/real_cases/arabidopsis.json @@ -0,0 +1,862 @@ +{ + "model": { + "variables": [ + { + "id": "AGO1", + "name": "AGO1", + "update_fn": "" + }, + { + "id": "AGO10", + "name": "AGO10", + "update_fn": "" + }, + { + "id": "AGO1_miR165", + "name": "AGO1_miR165", + "update_fn": "" + }, + { + "id": "AGO7", + "name": "AGO7", + "update_fn": "" + }, + { + "id": "AGO7_miR390", + "name": "AGO7_miR390", + "update_fn": "" + }, + { + "id": "ANT", + "name": "ANT", + "update_fn": "" + }, + { + "id": "ARF4", + "name": "ARF4", + "update_fn": "" + }, + { + "id": "AS1", + "name": "AS1", + "update_fn": "" + }, + { + "id": "AS1_AS2", + "name": "AS1_AS2", + "update_fn": "" + }, + { + "id": "AS2", + "name": "AS2", + "update_fn": "" + }, + { + "id": "AUXINh", + "name": "AUXINh", + "update_fn": "" + }, + { + "id": "CKh", + "name": "CKh", + "update_fn": "" + }, + { + "id": "ETT", + "name": "ETT", + "update_fn": "" + }, + { + "id": "FIL", + "name": "FIL", + "update_fn": "" + }, + { + "id": "GTE6", + "name": "GTE6", + "update_fn": "" + }, + { + "id": "IPT5", + "name": "IPT5", + "update_fn": "" + }, + { + "id": "KAN1", + "name": "KAN1", + "update_fn": "" + }, + { + "id": "REV", + "name": "REV", + "update_fn": "" + }, + { + "id": "TAS3siRNA", + "name": "TAS3siRNA", + "update_fn": "" + }, + { + "id": "miR165", + "name": "miR165", + "update_fn": "" + }, + { + "id": "miR390", + "name": "miR390", + "update_fn": "" + } + ], + "regulations": [ + { + "regulator": "AGO1", + "target": "AGO1_miR165", + "sign": "Activation", + "essential": "True" + }, + { + "regulator": "AGO10", + "target": "AGO1", + "sign": "Inhibition", + "essential": "Unknown" + }, + { + "regulator": "AGO1_miR165", + "target": "REV", + "sign": "Inhibition", + "essential": "True" + }, + { + "regulator": "AGO7", + "target": "AGO7_miR390", 
+ "sign": "Activation", + "essential": "True" + }, + { + "regulator": "AGO7_miR390", + "target": "TAS3siRNA", + "sign": "Activation", + "essential": "True" + }, + { + "regulator": "ANT", + "target": "FIL", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "ARF4", + "target": "FIL", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "AS1", + "target": "AS1_AS2", + "sign": "Activation", + "essential": "True" + }, + { + "regulator": "AS1_AS2", + "target": "KAN1", + "sign": "Inhibition", + "essential": "Unknown" + }, + { + "regulator": "AS1_AS2", + "target": "miR165", + "sign": "Inhibition", + "essential": "Unknown" + }, + { + "regulator": "AS2", + "target": "AS1_AS2", + "sign": "Activation", + "essential": "True" + }, + { + "regulator": "AUXINh", + "target": "ANT", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "AUXINh", + "target": "ARF4", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "AUXINh", + "target": "AUXINh", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "AUXINh", + "target": "REV", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "AUXINh", + "target": "miR390", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "CKh", + "target": "GTE6", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "ETT", + "target": "FIL", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "FIL", + "target": "ARF4", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "FIL", + "target": "ETT", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "FIL", + "target": "KAN1", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "GTE6", + "target": "AS1", + "sign": "Activation", + "essential": "True" + }, + { + "regulator": "IPT5", + "target": "CKh", + "sign": "Activation", + "essential": "True" + }, + { + "regulator": "KAN1", + "target": "AS2", + "sign": "Inhibition", + "essential": "True" + }, + { + "regulator": "REV", + "target": "AGO10", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "REV", + "target": "AGO7", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "REV", + "target": "IPT5", + "sign": "Activation", + "essential": "Unknown" + }, + { + "regulator": "TAS3siRNA", + "target": "ARF4", + "sign": "Inhibition", + "essential": "True" + }, + { + "regulator": "TAS3siRNA", + "target": "ETT", + "sign": "Inhibition", + "essential": "True" + }, + { + "regulator": "TAS3siRNA", + "target": "miR165", + "sign": "Inhibition", + "essential": "Unknown" + }, + { + "regulator": "miR165", + "target": "AGO1_miR165", + "sign": "Activation", + "essential": "True" + }, + { + "regulator": "miR390", + "target": "AGO7_miR390", + "sign": "Activation", + "essential": "True" + } + ], + "uninterpreted_fns": [], + "layouts": [ + { + "id": "default", + "name": "default", + "nodes": [ + { + "layout": "default", + "variable": "ARF4", + "px": -74.91142, + "py": -218.5323 + }, + { + "layout": "default", + "variable": "CKh", + "px": -235.80388, + "py": -65.14867 + }, + { + "layout": "default", + "variable": "ANT", + "px": -152.03842, + "py": -183.62268 + }, + { + "layout": "default", + "variable": "AGO7", + "px": 315.59872, + "py": 76.21962 + }, + { + "layout": "default", + "variable": "FIL", + "px": 27.96114, + "py": -238.2047 + }, + { + "layout": "default", + "variable": "AS1_AS2", + "px": 222.5151, + "py": 198.69817 + }, + 
{ + "layout": "default", + "variable": "ETT", + "px": 116.12924, + "py": -226.75644 + }, + { + "layout": "default", + "variable": "miR390", + "px": 180.1229, + "py": -203.43517 + }, + { + "layout": "default", + "variable": "KAN1", + "px": -172.00859, + "py": 186.5409 + }, + { + "layout": "default", + "variable": "IPT5", + "px": -244.31677, + "py": 0.3059232 + }, + { + "layout": "default", + "variable": "AGO10", + "px": -211.38138, + "py": 125.12924 + }, + { + "layout": "default", + "variable": "AGO7_miR390", + "px": 279.33173, + "py": 146.2002 + }, + { + "layout": "default", + "variable": "GTE6", + "px": 313.60553, + "py": -56.795307 + }, + { + "layout": "default", + "variable": "AS2", + "px": 282.20248, + "py": -112.04745 + }, + { + "layout": "default", + "variable": "AS1", + "px": 239.0624, + "py": -165.63762 + }, + { + "layout": "default", + "variable": "REV", + "px": -246.3728, + "py": 49.55174 + }, + { + "layout": "default", + "variable": "TAS3siRNA", + "px": 319.6096, + "py": 5.985072 + }, + { + "layout": "default", + "variable": "AGO1_miR165", + "px": 33.103477, + "py": 250.55807 + }, + { + "layout": "default", + "variable": "miR165", + "px": 141.75871, + "py": 242.05377 + }, + { + "layout": "default", + "variable": "AUXINh", + "px": -197.56892, + "py": -128.57117 + }, + { + "layout": "default", + "variable": "AGO1", + "px": -87.963394, + "py": 234.34479 + } + ] + } + ] + }, + "datasets": [], + "dyn_properties": [ + { + "id": "fp2", + "name": "fp2", + "variant": "GenericDynProp", + "formula": "(3{x}: (@{x}: (~AGO1 & AGO10 & AGO7 & ANT & ~ARF4 & AS1 & AS2 & ~ETT & ~FIL & ~KAN1 & ~miR165 & miR390 & REV & TAS3siRNA & ~AGO1_miR165 & AGO7_miR390 & AS1_AS2 & AUXINh & CKh & GTE6 & IPT5 & (AX (~AGO1 & AGO10 & AGO7 & ANT & ~ARF4 & AS1 & AS2 & ~ETT & ~FIL & ~KAN1 & ~miR165 & miR390 & REV & TAS3siRNA & ~AGO1_miR165 & AGO7_miR390 & AS1_AS2 & AUXINh & CKh & GTE6 & IPT5)))))" + }, + { + "id": "fp1", + "name": "fp1", + "variant": "GenericDynProp", + "formula": "(3{x}: (@{x}: (AGO1 & ~AGO10 & ~AGO7 & ANT & ARF4 & ~AS1 & ~AS2 & ETT & FIL & KAN1 & miR165 & miR390 & ~REV & ~TAS3siRNA & AGO1_miR165 & ~AGO7_miR390 & ~AS1_AS2 & AUXINh & ~CKh & ~GTE6 & ~IPT5 & (AX (AGO1 & ~AGO10 & ~AGO7 & ANT & ARF4 & ~AS1 & ~AS2 & ETT & FIL & KAN1 & miR165 & miR390 & ~REV & ~TAS3siRNA & AGO1_miR165 & ~AGO7_miR390 & ~AS1_AS2 & AUXINh & ~CKh & ~GTE6 & ~IPT5)))))" + } + ], + "stat_properties": [ + { + "id": "essentiality_TAS3siRNA_ETT", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "TAS3siRNA", + "target": "ETT", + "value": "True", + "context": null + }, + { + "id": "monotonicity_TAS3siRNA_ETT", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "TAS3siRNA", + "target": "ETT", + "value": "Inhibition", + "context": null + }, + { + "id": "monotonicity_AUXINh_AUXINh", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AUXINh", + "target": "AUXINh", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_miR165_AGO1_miR165", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "miR165", + "target": "AGO1_miR165", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_FIL_ETT", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "FIL", + "target": "ETT", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_AS1_AS2_KAN1", + "name": "Regulation monotonicity 
property", + "variant": "RegulationMonotonic", + "input": "AS1_AS2", + "target": "KAN1", + "value": "Inhibition", + "context": null + }, + { + "id": "monotonicity_FIL_KAN1", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "FIL", + "target": "KAN1", + "value": "Activation", + "context": null + }, + { + "id": "essentiality_miR165_AGO1_miR165", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "miR165", + "target": "AGO1_miR165", + "value": "True", + "context": null + }, + { + "id": "essentiality_AGO1_AGO1_miR165", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "AGO1", + "target": "AGO1_miR165", + "value": "True", + "context": null + }, + { + "id": "monotonicity_FIL_ARF4", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "FIL", + "target": "ARF4", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_AUXINh_miR390", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AUXINh", + "target": "miR390", + "value": "Activation", + "context": null + }, + { + "id": "essentiality_AGO7_AGO7_miR390", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "AGO7", + "target": "AGO7_miR390", + "value": "True", + "context": null + }, + { + "id": "monotonicity_AUXINh_ARF4", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AUXINh", + "target": "ARF4", + "value": "Activation", + "context": null + }, + { + "id": "essentiality_AS1_AS1_AS2", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "AS1", + "target": "AS1_AS2", + "value": "True", + "context": null + }, + { + "id": "monotonicity_AS1_AS2_miR165", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AS1_AS2", + "target": "miR165", + "value": "Inhibition", + "context": null + }, + { + "id": "monotonicity_AGO1_AGO1_miR165", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AGO1", + "target": "AGO1_miR165", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_TAS3siRNA_miR165", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "TAS3siRNA", + "target": "miR165", + "value": "Inhibition", + "context": null + }, + { + "id": "monotonicity_AGO1_miR165_REV", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AGO1_miR165", + "target": "REV", + "value": "Inhibition", + "context": null + }, + { + "id": "essentiality_KAN1_AS2", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "KAN1", + "target": "AS2", + "value": "True", + "context": null + }, + { + "id": "essentiality_AGO1_miR165_REV", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "AGO1_miR165", + "target": "REV", + "value": "True", + "context": null + }, + { + "id": "essentiality_miR390_AGO7_miR390", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "miR390", + "target": "AGO7_miR390", + "value": "True", + "context": null + }, + { + "id": "monotonicity_AUXINh_ANT", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AUXINh", + "target": "ANT", + "value": "Activation", + "context": null + }, + { + 
"id": "monotonicity_AGO7_miR390_TAS3siRNA", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AGO7_miR390", + "target": "TAS3siRNA", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_ARF4_FIL", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "ARF4", + "target": "FIL", + "value": "Activation", + "context": null + }, + { + "id": "essentiality_TAS3siRNA_ARF4", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "TAS3siRNA", + "target": "ARF4", + "value": "True", + "context": null + }, + { + "id": "monotonicity_REV_AGO7", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "REV", + "target": "AGO7", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_AGO10_AGO1", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AGO10", + "target": "AGO1", + "value": "Inhibition", + "context": null + }, + { + "id": "monotonicity_GTE6_AS1", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "GTE6", + "target": "AS1", + "value": "Activation", + "context": null + }, + { + "id": "essentiality_GTE6_AS1", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "GTE6", + "target": "AS1", + "value": "True", + "context": null + }, + { + "id": "monotonicity_AS1_AS1_AS2", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AS1", + "target": "AS1_AS2", + "value": "Activation", + "context": null + }, + { + "id": "essentiality_IPT5_CKh", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "IPT5", + "target": "CKh", + "value": "True", + "context": null + }, + { + "id": "monotonicity_miR390_AGO7_miR390", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "miR390", + "target": "AGO7_miR390", + "value": "Activation", + "context": null + }, + { + "id": "essentiality_AGO7_miR390_TAS3siRNA", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "AGO7_miR390", + "target": "TAS3siRNA", + "value": "True", + "context": null + }, + { + "id": "monotonicity_AUXINh_REV", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AUXINh", + "target": "REV", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_CKh_GTE6", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "CKh", + "target": "GTE6", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_IPT5_CKh", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "IPT5", + "target": "CKh", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_AGO7_AGO7_miR390", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AGO7", + "target": "AGO7_miR390", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_REV_IPT5", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "REV", + "target": "IPT5", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_ANT_FIL", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "ANT", + "target": "FIL", + "value": "Activation", + 
"context": null + }, + { + "id": "monotonicity_REV_AGO10", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "REV", + "target": "AGO10", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_KAN1_AS2", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "KAN1", + "target": "AS2", + "value": "Inhibition", + "context": null + }, + { + "id": "essentiality_AS2_AS1_AS2", + "name": "Regulation essentiality property", + "variant": "RegulationEssential", + "input": "AS2", + "target": "AS1_AS2", + "value": "True", + "context": null + }, + { + "id": "monotonicity_AS2_AS1_AS2", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "AS2", + "target": "AS1_AS2", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_ETT_FIL", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "ETT", + "target": "FIL", + "value": "Activation", + "context": null + }, + { + "id": "monotonicity_TAS3siRNA_ARF4", + "name": "Regulation monotonicity property", + "variant": "RegulationMonotonic", + "input": "TAS3siRNA", + "target": "ARF4", + "value": "Inhibition", + "context": null + } + ] +} \ No newline at end of file diff --git a/data/real_cases/readme.md b/data/real_cases/readme.md new file mode 100644 index 0000000..31d326d --- /dev/null +++ b/data/real_cases/readme.md @@ -0,0 +1,15 @@ +A real case studies collected and adapted to BN sketches framework: + +1) Apoptosis case study: + - background: This model and expected attractor data is taken from [this paper](https://doi.org/10.3389/fgene.2018.00039) about the BN inference tool Griffin. Originally, the model was developed by La Rota et al. in [this paper](https://doi.org/10.1105/tpc.111.092619). + - model: We use the fully unspecified model directly as is. + - properties: We use two fixed-point properties adapted from the Griffin paper. + - candidates after static: 4761711360 + - candidates after all: 439296 + +2) TLGL case study: + - background: The original model was developed by Saadatpour et al. in [this paper](https://doi.org/10.1371/journal.pcbi.1002267). It is a reduced version of the model developed by Zhang et al. in [this article](https://doi.org/10.1073/pnas.0806447105). The experimental attractor data comes [this work](https://doi.org/10.1371/journal.pcbi.1002267) as well. + - model: We use a partially specified version of the reduced model, only assuming that Apoptosis must inactivate all variables. + - properties: We use a fixed-point property for a "healthy attractor" (cell death) and a complex attractor property for a "diseased attractor", based on provided data regarding the T-LGL state. + - candidates after static: 1296 + - candidates after all: 486 \ No newline at end of file diff --git a/data/real_models/annotated_tlgl.aeon b/data/real_cases/tlgl.aeon similarity index 100% rename from data/real_models/annotated_tlgl.aeon rename to data/real_cases/tlgl.aeon diff --git a/data/real_models/tlgl.json b/data/real_cases/tlgl.json similarity index 100% rename from data/real_models/tlgl.json rename to data/real_cases/tlgl.json diff --git a/data/synthetic_cases/cell_div_b_9v.aeon b/data/synthetic_cases/cell_div_b_9v.aeon new file mode 100644 index 0000000..19d2de3 --- /dev/null +++ b/data/synthetic_cases/cell_div_b_9v.aeon @@ -0,0 +1,17 @@ +## PROPERTIES +#! 
dynamic_property: attr1: #`(3{x}: (@{x}: (~CckA & ~ChpT & ClpXP_RcdA & ~CpdR & ~CtrAb & DivJ & DivK & ~DivL & ~PleC & (AG EF(~CckA & ~ChpT & ClpXP_RcdA & ~CpdR & ~CtrAb & DivJ & DivK & ~DivL & ~PleC)))))`# +#! dynamic_property: attr2: #`(3{x}: (@{x}: (CckA & ChpT & ~ClpXP_RcdA & CpdR & CtrAb & ~DivJ & ~DivK & DivL & PleC & (AG EF (CckA & ChpT & ~ClpXP_RcdA & CpdR & CtrAb & ~DivJ & ~DivK & DivL & PleC)))))`# + +## MODEL +DivK -? DivL +DivL -? CckA +CckA -? ChpT +ChpT -? CpdR +CpdR -? ClpXP_RcdA +ChpT -? CtrAb +ClpXP_RcdA -? CtrAb +DivJ -? DivK +PleC -? DivK +DivK -? DivJ +PleC -? DivJ +DivK -? PleC \ No newline at end of file diff --git a/data/synthetic_cases/readme.md b/data/synthetic_cases/readme.md new file mode 100644 index 0000000..6f2ac51 --- /dev/null +++ b/data/synthetic_cases/readme.md @@ -0,0 +1,5 @@ +A set of benchmarks based on partially specified variants of real models, with synthetic attractor data. +The data was sampled from the attractors of the original model. + +Expected results (numbers of candidates): +- cell_div_b: 64000 after static, 14088 after dynamic \ No newline at end of file diff --git a/src-tauri/src/sketchbook/properties/dynamic_props/_mk_hctl_formulas.rs b/src-tauri/src/algorithms/eval_dynamic/encode.rs similarity index 87% rename from src-tauri/src/sketchbook/properties/dynamic_props/_mk_hctl_formulas.rs rename to src-tauri/src/algorithms/eval_dynamic/encode.rs index 0e8327a..1a57e45 100644 --- a/src-tauri/src/sketchbook/properties/dynamic_props/_mk_hctl_formulas.rs +++ b/src-tauri/src/algorithms/eval_dynamic/encode.rs @@ -1,11 +1,45 @@ use crate::sketchbook::observations::{DataCategory, Dataset, Observation, VarValue}; +use crate::sketchbook::properties::HctlFormula; + +/// Encode an observation by a (propositional) formula depicting the corresponding state/sub-space. +/// The observation's binary values are used to create a conjunction of literals. +/// The `var_names` are used as propositions names in the formula. +pub fn try_encode_observation( + obs: &Observation, + var_names: &[String], +) -> Result { + let formula = encode_observation_str(obs, var_names)?; + HctlFormula::try_from_str(&formula) +} + +/// Encode each of the several observations, one by one. +/// For details, see [Self::encode_observation]. +pub fn try_encode_multiple_observations( + observations: &[Observation], + var_names: &[String], +) -> Result, String> { + let formulae = encode_multiple_observations_str(observations, var_names)?; + formulae + .iter() + .map(|f| HctlFormula::try_from_str(f)) + .collect::, String>>() +} + +/// Encode a dataset of observations as a single HCTL formula. The particular formula +/// template is chosen depending on the type of data (attractor data, time-series, ...). +/// +/// Only data with their type specified can be encoded. +pub fn try_encode_dataset_hctl(dataset: &Dataset) -> Result { + let formula = encode_dataset_hctl_str(dataset)?; + HctlFormula::try_from_str(&formula) +} /// Encode binarized observation with a formula depicting the corresponding state/sub-space. /// Using binarized values and proposition names, creates a conjunction of literals /// describing that observation. /// /// `00*1*1` would end up like `!v1 & !v2 & v4 & v6` -pub fn encode_observation( +fn encode_observation_str( observation: &Observation, prop_names: &[String], ) -> Result { @@ -35,13 +69,13 @@ pub fn encode_observation( /// Encode several observation vectors with conjunction formulae, one by one. /// Also see [encode_observation] for details. 
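The observation encoding described above is small enough to capture in a self-contained sketch. The function below is only illustrative (the name `encode_observation_sketch` and the raw-string input are assumptions for this example; the real `encode_observation_str` works on `Observation` values), but it mirrors the same rule: `0` becomes a negated literal, `1` a positive literal, and `*` is skipped, with an empty conjunction collapsing to `(true)`.

```rust
/// Illustrative sketch of the observation encoding used above: turn a binarized
/// observation string (e.g., "001*1") and matching proposition names into a
/// conjunction of literals.
fn encode_observation_sketch(values: &str, prop_names: &[&str]) -> Result<String, String> {
    if values.chars().count() != prop_names.len() {
        return Err("Number of values and proposition names must match.".to_string());
    }
    let literals: Vec<String> = values
        .chars()
        .zip(prop_names.iter())
        .filter_map(|(value, name)| match value {
            '1' => Some(name.to_string()),
            '0' => Some(format!("~{name}")),
            _ => None, // '*' means the value is unspecified, so no literal is emitted
        })
        .collect();
    if literals.is_empty() {
        // a fully unspecified observation constrains nothing
        Ok("(true)".to_string())
    } else {
        Ok(format!("({})", literals.join(" & ")))
    }
}

// encode_observation_sketch("001*1", &["a", "b", "c", "d", "e"]) == Ok("(~a & ~b & c & e)".to_string())
```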
-pub fn encode_multiple_observations( +fn encode_multiple_observations_str( observations: &[Observation], prop_names: &[String], ) -> Result, String> { observations .iter() - .map(|o| encode_observation(o, prop_names)) + .map(|o| encode_observation_str(o, prop_names)) .collect::, String>>() } @@ -55,14 +89,14 @@ pub fn encode_multiple_observations( /// (see [mk_formula_fixed_point_set]) that ensures each observation correspond to a fixed point. /// c) Attractor dataset is encoded as a conjunction of "attractor formulas", /// (see [mk_formula_attractor_set]) that ensures each observation correspond to an attractor. -pub fn encode_dataset_hctl(observation_list: &Dataset) -> Result { +fn encode_dataset_hctl_str(observation_list: &Dataset) -> Result { let variables_strings = observation_list .variables() .iter() .map(|v| v.to_string()) .collect::>(); let encoded_observations = - encode_multiple_observations(observation_list.observations(), &variables_strings)?; + encode_multiple_observations_str(observation_list.observations(), &variables_strings)?; match observation_list.category() { DataCategory::Attractor => Ok(mk_formula_attractor_set(&encoded_observations)), DataCategory::FixedPoint => Ok(mk_formula_fixed_point_set(&encoded_observations)), @@ -276,8 +310,8 @@ pub fn mk_formula_reachability_chain(states_sequence: &[String]) -> String { #[cfg(test)] mod tests { + use super::*; use crate::sketchbook::observations::{DataCategory, Dataset, Observation}; - use crate::sketchbook::properties::dynamic_props::_mk_hctl_formulas::*; #[test] /// Test encoding of an observation. @@ -292,18 +326,27 @@ mod tests { let obs1 = Observation::try_from_str("001*1", "o1").unwrap(); let encoded1 = "(~a & ~b & c & e)"; - assert_eq!(encode_observation(&obs1, &prop_names).unwrap(), encoded1); + assert_eq!( + encode_observation_str(&obs1, &prop_names).unwrap(), + encoded1 + ); let obs2 = Observation::try_from_str("001**", "o2").unwrap(); let encoded2 = "(~a & ~b & c)"; - assert_eq!(encode_observation(&obs2, &prop_names).unwrap(), encoded2); + assert_eq!( + encode_observation_str(&obs2, &prop_names).unwrap(), + encoded2 + ); let obs3 = Observation::try_from_str("*****", "o3").unwrap(); let encoded3 = "(true)"; - assert_eq!(encode_observation(&obs3, &prop_names).unwrap(), encoded3); + assert_eq!( + encode_observation_str(&obs3, &prop_names).unwrap(), + encoded3 + ); let multiple_encoded = - encode_multiple_observations(&vec![obs1, obs2, obs3], &prop_names).unwrap(); + encode_multiple_observations_str(&vec![obs1, obs2, obs3], &prop_names).unwrap(); assert_eq!(multiple_encoded, vec![encoded1, encoded2, encoded3]); } @@ -321,7 +364,7 @@ mod tests { DataCategory::Attractor, ); assert_eq!( - &encode_dataset_hctl(&attr_observations.unwrap()).unwrap(), + &encode_dataset_hctl_str(&attr_observations.unwrap()).unwrap(), "((3{x}: (@{x}: ((a & b & ~c) & (AG EF ((a & b & ~c) & {x}))))) & (3{x}: (@{x}: ((a & c) & (AG EF ((a & c) & {x}))))))", ); @@ -331,7 +374,7 @@ mod tests { DataCategory::FixedPoint, ); assert_eq!( - &encode_dataset_hctl(&fixed_point_observations.unwrap()).unwrap(), + &encode_dataset_hctl_str(&fixed_point_observations.unwrap()).unwrap(), "((3{x}: (@{x}: ((a & b & ~c) & (AX ((a & b & ~c) & {x}))))) & (3{x}: (@{x}: ((a & c) & (AX ((a & c) & {x}))))))", ); @@ -341,7 +384,7 @@ mod tests { DataCategory::TimeSeries, ); assert_eq!( - &encode_dataset_hctl(&time_series_observations.unwrap()).unwrap(), + &encode_dataset_hctl_str(&time_series_observations.unwrap()).unwrap(), "(3{x}: (@{x}: ((a & b & ~c)) & EF ((a & c))))", 
); @@ -350,7 +393,7 @@ mod tests { var_names.clone(), DataCategory::Unspecified, ); - assert!(encode_dataset_hctl(&unspecified_observations.unwrap()).is_err()); + assert!(encode_dataset_hctl_str(&unspecified_observations.unwrap()).is_err()); } #[test] diff --git a/src-tauri/src/algorithms/eval_dynamic/eval.rs b/src-tauri/src/algorithms/eval_dynamic/eval.rs index a16e458..705e8bc 100644 --- a/src-tauri/src/algorithms/eval_dynamic/eval.rs +++ b/src-tauri/src/algorithms/eval_dynamic/eval.rs @@ -3,6 +3,9 @@ use crate::sketchbook::properties::DynProperty; use biodivine_hctl_model_checker::model_checking::model_check_formula_dirty; use biodivine_lib_param_bn::symbolic_async_graph::{GraphColors, SymbolicAsyncGraph}; +/// Evaluate given dynamic property given the transition graph. +/// +/// TODO: We still need to handle template properties. pub fn eval_dyn_prop( dyn_prop: DynProperty, graph: &SymbolicAsyncGraph, diff --git a/src-tauri/src/algorithms/eval_dynamic/mod.rs b/src-tauri/src/algorithms/eval_dynamic/mod.rs index c2f9004..e047b54 100644 --- a/src-tauri/src/algorithms/eval_dynamic/mod.rs +++ b/src-tauri/src/algorithms/eval_dynamic/mod.rs @@ -1,3 +1,5 @@ +/// Encode data and template properties into HCTL. +pub mod encode; /// Evaluate all kinds of dynamic properties. pub mod eval; /// Prepare graph and symbolic context to handle all dynamic properties. diff --git a/src-tauri/src/algorithms/eval_dynamic/prepare_graph.rs b/src-tauri/src/algorithms/eval_dynamic/prepare_graph.rs index b60447c..ca8153f 100644 --- a/src-tauri/src/algorithms/eval_dynamic/prepare_graph.rs +++ b/src-tauri/src/algorithms/eval_dynamic/prepare_graph.rs @@ -8,13 +8,15 @@ use biodivine_lib_param_bn::BooleanNetwork; use std::cmp::max; use std::collections::HashMap; +/// Prepare the symbolic context and generate the symbolic transition graph for +/// evaluation of the static properties. +/// +/// TODO: we now only consider generic HCTL properties (other template variants might need special treatment too) pub fn prepare_graph_for_dynamic( bn: &BooleanNetwork, dyn_props: &Vec, unit: Option<(&Bdd, &SymbolicContext)>, ) -> Result { - // todo: we now only consider generic HCTL properties (other template variants might need special treatment too) - let mut num_hctl_vars = 0; let plain_context = SymbolicContext::new(bn).unwrap(); for prop in dyn_props { @@ -32,6 +34,9 @@ pub fn prepare_graph_for_dynamic( get_hctl_extended_symbolic_graph(bn, num_hctl_vars as u16, unit) } +/// Prepare the symbolic context and generate the symbolic transition graph for +/// evaluation of HCTL formulas. This means we need to prepare symbolic variables to +/// cover all variables in these HCTL formulas. fn get_hctl_extended_symbolic_graph( bn: &BooleanNetwork, num_hctl_vars: u16, diff --git a/src-tauri/src/algorithms/eval_static/encode.rs b/src-tauri/src/algorithms/eval_static/encode.rs new file mode 100644 index 0000000..e18f994 --- /dev/null +++ b/src-tauri/src/algorithms/eval_static/encode.rs @@ -0,0 +1,267 @@ +use crate::algorithms::fo_logic::utils::get_implicit_function_name; +use crate::sketchbook::model::Essentiality; +use crate::sketchbook::model::Monotonicity; + +use biodivine_lib_param_bn::BooleanNetwork; + +/// Create a FOL formula encoding that a regulation has given monotonicity. 
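Before the individual encoder functions below, it may help to summarize the constraints they produce in ordinary first-order notation. This is only a restatement of the formulas built by `encode_monotonicity` and `encode_essentiality` (here $f$ is the uninterpreted function of the target, $x_i$ the regulated input, and the quantifiers range over the remaining inputs):

```latex
\begin{aligned}
\text{activation in } x_i &:\; \forall x_1, \dots, x_{i-1}, x_{i+1}, \dots, x_n :\;
  f(x_1, \dots, 0, \dots, x_n) \Rightarrow f(x_1, \dots, 1, \dots, x_n) \\
\text{inhibition in } x_i &:\; \forall x_1, \dots, x_{i-1}, x_{i+1}, \dots, x_n :\;
  f(x_1, \dots, 1, \dots, x_n) \Rightarrow f(x_1, \dots, 0, \dots, x_n) \\
\text{essentiality of } x_i &:\; \exists x_1, \dots, x_{i-1}, x_{i+1}, \dots, x_n :\;
  f(x_1, \dots, 0, \dots, x_n) \oplus f(x_1, \dots, 1, \dots, x_n)
\end{aligned}
```

A dual sign is encoded as the conjunction of the negations of the two monotonicity formulas, a `False` essentiality as the negation of the existential formula, and `Unknown` values simply become `true`, matching the expected strings in the tests further below.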
+pub fn encode_regulation_monotonicity( + input: &str, + target: &str, + monotonicity: Monotonicity, + bn: &BooleanNetwork, +) -> String { + let target_var = bn.as_graph().find_variable(target).unwrap(); + let input_var = bn.as_graph().find_variable(input).unwrap(); + let regulators = bn.regulators(target_var); + + let number_inputs = regulators.len(); + let index = regulators.iter().position(|var| *var == input_var).unwrap(); + + let fn_name = get_implicit_function_name(target); + encode_monotonicity(number_inputs, index, &fn_name, monotonicity) +} + +/// Create a FOL formula encoding that a regulation has given essentiality. +pub fn encode_regulation_essentiality( + input: &str, + target: &str, + essentiality: Essentiality, + bn: &BooleanNetwork, +) -> String { + let target_var = bn.as_graph().find_variable(target).unwrap(); + let input_var = bn.as_graph().find_variable(input).unwrap(); + let regulators = bn.regulators(target_var); + + let number_inputs = regulators.len(); + let index = regulators.iter().position(|var| *var == input_var).unwrap(); + let fn_name = get_implicit_function_name(target); + + encode_essentiality(number_inputs, index, &fn_name, essentiality) +} + +/// Create a FOL formula encoding that uninterpreted function's argument (given by the index) +/// has given monotonicity. +pub fn encode_monotonicity( + number_inputs: usize, + index: usize, + fn_name: &str, + monotonicity: Monotonicity, +) -> String { + assert!(index < number_inputs); + + if let Monotonicity::Unknown = monotonicity { + return "true".to_string(); + } + + if let Monotonicity::Dual = monotonicity { + // dual regulation is not an activation nor inhibition + let activation = + encode_monotonicity(number_inputs, index, fn_name, Monotonicity::Activation); + let inhibition = + encode_monotonicity(number_inputs, index, fn_name, Monotonicity::Inhibition); + return format!("!({activation}) & !({inhibition})"); + } + + let mut quantifier_args = String::new(); + let mut left_fn_args = String::new(); + let mut right_fn_args = String::new(); + for i in 0..number_inputs { + if i == index { + match monotonicity { + Monotonicity::Activation => { + left_fn_args.push_str("0, "); + right_fn_args.push_str("1, "); + } + Monotonicity::Inhibition => { + left_fn_args.push_str("1, "); + right_fn_args.push_str("0, "); + } + _ => unreachable!(), // Dual and Unknown monotonicities are handled before + } + } else { + quantifier_args.push_str(format!("x_{i}, ").as_str()); + left_fn_args.push_str(format!("x_{i}, ").as_str()); + right_fn_args.push_str(format!("x_{i}, ").as_str()); + } + } + left_fn_args = left_fn_args.strip_suffix(", ").unwrap().to_string(); + right_fn_args = right_fn_args.strip_suffix(", ").unwrap().to_string(); + + if number_inputs > 1 { + quantifier_args = quantifier_args.strip_suffix(", ").unwrap().to_string(); + format!( + "\\forall {quantifier_args}: {fn_name}({left_fn_args}) => {fn_name}({right_fn_args})" + ) + } else { + // no quantified variables + format!("{fn_name}({left_fn_args}) => {fn_name}({right_fn_args})") + } +} + +/// Create a FOL formula encoding that uninterpreted function's argument (given by the index) +/// has given essentiality. 
+pub fn encode_essentiality( + number_inputs: usize, + index: usize, + fn_name: &str, + essentiality: Essentiality, +) -> String { + assert!(index < number_inputs); + + if let Essentiality::Unknown = essentiality { + return "true".to_string(); + } + + let mut quantifier_args = String::new(); + let mut left_fn_args = String::new(); + let mut right_fn_args = String::new(); + for i in 0..number_inputs { + if i == index { + left_fn_args.push_str("0, "); + right_fn_args.push_str("1, "); + } else { + quantifier_args.push_str(format!("x_{i}, ").as_str()); + left_fn_args.push_str(format!("x_{i}, ").as_str()); + right_fn_args.push_str(format!("x_{i}, ").as_str()); + } + } + left_fn_args = left_fn_args.strip_suffix(", ").unwrap().to_string(); + right_fn_args = right_fn_args.strip_suffix(", ").unwrap().to_string(); + + let formula = if number_inputs > 1 { + quantifier_args = quantifier_args.strip_suffix(", ").unwrap().to_string(); + format!( + "\\exists {quantifier_args}: {fn_name}({left_fn_args}) ^ {fn_name}({right_fn_args})" + ) + } else { + // no quantified variables + format!("{fn_name}({left_fn_args}) ^ {fn_name}({right_fn_args})") + }; + + match essentiality { + Essentiality::True => formula, + Essentiality::False => format!("!({formula})"), + Essentiality::Unknown => unreachable!(), // handled before + } +} + +/// Create a FOL formula encoding that particular formula must hold if "context" formula holds. +pub fn encode_property_in_context(context_formula: &str, property_formula: &str) -> String { + format!("{context_formula} => {property_formula}") +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + /// Test encoding of monotonicity for regulations. + fn test_encoding_regulation_monotonicity() { + // dummy model, just to have list of variables and numbers of regulators (types of regulations dont matter) + let aeon_str = r#" + A -? B + B -> C + A -? C + C -| C + "#; + let bn = BooleanNetwork::try_from(aeon_str).unwrap(); + + // encode that regulation B -> C is positive + let fol_formula = encode_regulation_monotonicity("B", "C", Monotonicity::Activation, &bn); + let expected = "\\forall x_0, x_2: f_C(x_0, 0, x_2) => f_C(x_0, 1, x_2)"; + assert_eq!(&fol_formula, expected); + + // encode that regulation C -| C is negative + let fol_formula = encode_regulation_monotonicity("C", "C", Monotonicity::Inhibition, &bn); + let expected = "\\forall x_0, x_1: f_C(x_0, x_1, 1) => f_C(x_0, x_1, 0)"; + assert_eq!(&fol_formula, expected); + + // encode that regulation A -| B is unknown + let fol_formula = encode_regulation_monotonicity("A", "B", Monotonicity::Unknown, &bn); + let expected = "true"; + assert_eq!(&fol_formula, expected); + + // encode that regulation A -* C is dual + let fol_formula = encode_regulation_monotonicity("A", "C", Monotonicity::Dual, &bn); + let expected = "!(\\forall x_1, x_2: f_C(0, x_1, x_2) => f_C(1, x_1, x_2)) & !(\\forall x_1, x_2: f_C(1, x_1, x_2) => f_C(0, x_1, x_2))"; + assert_eq!(&fol_formula, expected); + } + + #[test] + /// Test encoding of essentiality for regulations. + fn test_encoding_regulation_essentiality() { + // dummy model, just to have list of variables and numbers of regulators (types of regulations dont matter) + let aeon_str = r#" + A ->? 
B + B -> C + A -> C + C -| C + "#; + let bn = BooleanNetwork::try_from(aeon_str).unwrap(); + + // encode that regulation B -> C is essential + let fol_formula = encode_regulation_essentiality("B", "C", Essentiality::True, &bn); + let expected = "\\exists x_0, x_2: f_C(x_0, 0, x_2) ^ f_C(x_0, 1, x_2)"; + assert_eq!(&fol_formula, expected); + + // encode that regulation C -| C has no effect (hypothetical) + let fol_formula = encode_regulation_essentiality("C", "C", Essentiality::False, &bn); + let expected = "!(\\exists x_0, x_1: f_C(x_0, x_1, 0) ^ f_C(x_0, x_1, 1))"; + assert_eq!(&fol_formula, expected); + + // encode that regulation A ->? B has unknown essentiality + let fol_formula = encode_regulation_essentiality("A", "B", Essentiality::Unknown, &bn); + let expected = "true"; + assert_eq!(&fol_formula, expected); + } + + #[test] + /// Test encoding of uninterpreted function monotonicity. + fn test_encoding_fn_monotonicity() { + // encode that fn "f" is positively monotonic in second of three inputs + let fol_formula = encode_monotonicity(3, 1, "f", Monotonicity::Activation); + let expected = "\\forall x_0, x_2: f(x_0, 0, x_2) => f(x_0, 1, x_2)"; + assert_eq!(&fol_formula, expected); + + // encode that fn "g" is negatively monotonic in its only input + let fol_formula = encode_monotonicity(1, 0, "g", Monotonicity::Activation); + let expected = "g(0) => g(1)"; + assert_eq!(&fol_formula, expected); + + // encode that fn "h" is dual in second of two inputs + let fol_formula = encode_monotonicity(2, 1, "h", Monotonicity::Dual); + let expected = + "!(\\forall x_0: h(x_0, 0) => h(x_0, 1)) & !(\\forall x_0: h(x_0, 1) => h(x_0, 0))"; + assert_eq!(&fol_formula, expected); + + // encode unknown monotonicity + let fol_formula = encode_monotonicity(3, 1, "f", Monotonicity::Unknown); + let expected = "true"; + assert_eq!(&fol_formula, expected); + } + + #[test] + /// Test encoding of uninterpreted fn essentiality. 
+ fn test_encoding_fn_essentiality() { + // encode that fn "f" is positively monotonic in second of three inputs + let fol_formula = encode_essentiality(3, 1, "f", Essentiality::True); + let expected = "\\exists x_0, x_2: f(x_0, 0, x_2) ^ f(x_0, 1, x_2)"; + assert_eq!(&fol_formula, expected); + + // encode that fn "g" is negatively monotonic in its only input + let fol_formula = encode_essentiality(1, 0, "g", Essentiality::True); + let expected = "g(0) ^ g(1)"; + assert_eq!(&fol_formula, expected); + + // encode that input has no effect (hypothetical) + let fol_formula = encode_essentiality(1, 0, "g", Essentiality::False); + let expected = "!(g(0) ^ g(1))"; + assert_eq!(&fol_formula, expected); + + // encode unknown monotonicity + let fol_formula = encode_essentiality(3, 1, "f", Essentiality::Unknown); + let expected = "true"; + assert_eq!(&fol_formula, expected); + } +} diff --git a/src-tauri/src/algorithms/eval_static/eval.rs b/src-tauri/src/algorithms/eval_static/eval.rs index f018305..20cfc3a 100644 --- a/src-tauri/src/algorithms/eval_static/eval.rs +++ b/src-tauri/src/algorithms/eval_static/eval.rs @@ -1,27 +1,20 @@ use crate::algorithms::fo_logic::eval_wrappers::eval_formula_dirty; -use crate::sketchbook::bn_utils; -use crate::sketchbook::model::Monotonicity; use crate::sketchbook::properties::static_props::StatPropertyType; use crate::sketchbook::properties::StatProperty; -use biodivine_lib_bdd::Bdd; use biodivine_lib_param_bn::biodivine_std::traits::Set; -use biodivine_lib_param_bn::symbolic_async_graph::{ - GraphColors, RegulationConstraint, SymbolicAsyncGraph, SymbolicContext, -}; -use biodivine_lib_param_bn::BooleanNetwork; +use biodivine_lib_param_bn::symbolic_async_graph::{GraphColors, SymbolicAsyncGraph}; +/// Evaluate given static property. +/// +/// Currently, we assume that all types of template properties must be already translated +/// to FOL generic properties. pub fn eval_static_prop( static_prop: StatProperty, - network: &BooleanNetwork, graph: &SymbolicAsyncGraph, base_var_name: &str, ) -> Result { - // look into https://github.com/sybila/biodivine-lib-param-bn/blob/master/src/symbolic_async_graph/_impl_regulation_constraint.rs - - let context = graph.symbolic_context(); // there might be some constraints already, and we only want to consider colors satisfying these too let initial_unit_colors = graph.mk_unit_colors(); - let unit_bdd = initial_unit_colors.as_bdd(); match static_prop.get_prop_data() { StatPropertyType::GenericStatProp(prop) => { @@ -29,84 +22,7 @@ pub fn eval_static_prop( let results = eval_formula_dirty(&formula, graph, base_var_name)?; Ok(results.colors().intersect(&initial_unit_colors)) } - StatPropertyType::FnInputEssential(_prop) => todo!(), - StatPropertyType::RegulationEssential(prop) => { - // For each variable, compute Bdd that is true exactly when its update function is true. 
- let update_function_is_true: Vec = mk_all_updates_true(context, network); - - let input_name = prop.clone().input.unwrap(); - let target_name = prop.clone().target.unwrap(); - let input_var = network - .as_graph() - .find_variable(input_name.as_str()) - .unwrap(); - let target_var = network - .as_graph() - .find_variable(target_name.as_str()) - .unwrap(); - - let fn_is_true = &update_function_is_true[target_var.to_index()]; - - let observable = bn_utils::essentiality_to_bool(prop.value); - let observability = if observable { - RegulationConstraint::mk_observability(context, fn_is_true, input_var) - } else { - context.mk_constant(true) - }; - let valid_colors = GraphColors::new(observability.and(unit_bdd), context); - Ok(valid_colors) - } - StatPropertyType::RegulationEssentialContext(_prop) => todo!(), - StatPropertyType::RegulationMonotonic(prop) => { - // For each variable, compute Bdd that is true exactly when its update function is true. - let update_function_is_true: Vec = mk_all_updates_true(context, network); - - let input_name = prop.clone().input.unwrap(); - let target_name = prop.clone().target.unwrap(); - let input_var = network - .as_graph() - .find_variable(input_name.as_str()) - .unwrap(); - let target_var = network - .as_graph() - .find_variable(target_name.as_str()) - .unwrap(); - - let fn_is_true = &update_function_is_true[target_var.to_index()]; - - let monotonicity = match prop.value { - Monotonicity::Activation => { - RegulationConstraint::mk_activation(context, fn_is_true, input_var) - } - Monotonicity::Inhibition => { - RegulationConstraint::mk_inhibition(context, fn_is_true, input_var) - } - Monotonicity::Unknown => context.mk_constant(true), - Monotonicity::Dual => unimplemented!(), - }; - let valid_colors = GraphColors::new(monotonicity.and(unit_bdd), context); - Ok(valid_colors) - } - StatPropertyType::RegulationMonotonicContext(_prop) => todo!(), - StatPropertyType::FnInputEssentialContext(_prop) => todo!(), - StatPropertyType::FnInputMonotonic(_prop) => todo!(), - StatPropertyType::FnInputMonotonicContext(_prop) => todo!(), + // currently, all other types of properties must be translated to FOL generic properties + _ => unreachable!(), } } - -/// For each variable, compute Bdd that is true exactly when its update function is true. -/// -/// This covers both the case when a variable has some update expression, and the case when -/// it has "implicit" (empty) update function. -fn mk_all_updates_true(context: &SymbolicContext, network: &BooleanNetwork) -> Vec { - network - .variables() - .map(|variable| { - if let Some(function) = network.get_update_function(variable) { - context.mk_fn_update_true(function) - } else { - context.mk_implicit_function_is_true(variable, &network.regulators(variable)) - } - }) - .collect() -} diff --git a/src-tauri/src/algorithms/eval_static/mod.rs b/src-tauri/src/algorithms/eval_static/mod.rs index 856a68f..304edb4 100644 --- a/src-tauri/src/algorithms/eval_static/mod.rs +++ b/src-tauri/src/algorithms/eval_static/mod.rs @@ -1,3 +1,5 @@ +/// Encode regulations and template properties into FOL. +pub mod encode; /// Evaluate all kinds static properties. pub mod eval; /// Prepare graph and symbolic context to handle all static properties. 
diff --git a/src-tauri/src/algorithms/eval_static/prepare_graph.rs b/src-tauri/src/algorithms/eval_static/prepare_graph.rs index 9d7f60a..2d70812 100644 --- a/src-tauri/src/algorithms/eval_static/prepare_graph.rs +++ b/src-tauri/src/algorithms/eval_static/prepare_graph.rs @@ -8,13 +8,19 @@ use biodivine_lib_param_bn::BooleanNetwork; use std::cmp::max; use std::collections::HashMap; +/// Prepare the symbolic context and generate the symbolic transition graph for +/// evaluation of the static properties. +/// +/// Since all the static properties are encoded as FOL properties, we just need to +/// handle this case. This means we need to prepare symbolic variables to cover all +/// variables in FOL formulas. pub fn prepare_graph_for_static( bn: &BooleanNetwork, static_props: &Vec, base_var_name: &str, unit: Option<(&Bdd, &SymbolicContext)>, ) -> Result { - // todo: we now only consider generic FOL properties (other template variants might need special treatment too) + // we now assume all properties are already encoded into generic FOL properties let mut num_fol_vars: usize = 0; //let plain_context = SymbolicContext::new(&bn).unwrap(); @@ -26,15 +32,16 @@ pub fn prepare_graph_for_static( let num_tree_vars = collect_unique_fol_vars(&tree).len(); num_fol_vars = max(num_fol_vars, num_tree_vars); } - StatPropertyType::RegulationMonotonic(..) - | StatPropertyType::RegulationEssential(..) => {} - _ => todo!(), + _ => unreachable!(), // all properties should be encoded in FOL by now } } get_fol_extended_symbolic_graph(bn, num_fol_vars as u16, base_var_name, unit) } +/// Prepare the symbolic context and generate the symbolic transition graph for +/// evaluation of FOL formulas. This means we need to prepare symbolic variables to +/// cover all variables in FOL formulas. 
fn get_fol_extended_symbolic_graph( bn: &BooleanNetwork, num_fol_vars: u16, diff --git a/src-tauri/src/algorithms/fo_logic/eval_algorithm.rs b/src-tauri/src/algorithms/fo_logic/eval_algorithm.rs index 8409440..51f0c61 100644 --- a/src-tauri/src/algorithms/fo_logic/eval_algorithm.rs +++ b/src-tauri/src/algorithms/fo_logic/eval_algorithm.rs @@ -35,7 +35,6 @@ pub fn eval_node(node: FolTreeNode, graph: &SymbolicAsyncGraph) -> GraphColoredV let name = fn_symbol.name; let arguments = arguments.into_iter().map(|a| *a).collect(); if fn_symbol.is_update_fn { - // todo - properly finish update function symbol evaluation eval_applied_update_function(graph, &name, arguments) } else { eval_applied_uninterpred_function(graph, &name, arguments) @@ -151,8 +150,8 @@ fn eval_applied_update_function( } eval_node(converted_update_fn, graph) } else { - // we can evaluate the function normally as any other uninterpreted fn - eval_applied_uninterpred_function(graph, fn_name, arguments) + // we already made sure that empty functions are substituted with expressions "f_v_N(regulator1, ..., regulatorM)" + unreachable!() } } diff --git a/src-tauri/src/algorithms/fo_logic/parser.rs b/src-tauri/src/algorithms/fo_logic/parser.rs index 090453b..8539e61 100644 --- a/src-tauri/src/algorithms/fo_logic/parser.rs +++ b/src-tauri/src/algorithms/fo_logic/parser.rs @@ -186,8 +186,8 @@ fn parse_8_terms_and_parentheses(tokens: &[FolToken]) -> Result { return Ok(FolTreeNode::mk_variable(name.as_str())); diff --git a/src-tauri/src/algorithms/fo_logic/tokenizer.rs b/src-tauri/src/algorithms/fo_logic/tokenizer.rs index ed50dfc..a3dc800 100644 --- a/src-tauri/src/algorithms/fo_logic/tokenizer.rs +++ b/src-tauri/src/algorithms/fo_logic/tokenizer.rs @@ -319,10 +319,11 @@ fn print_tokens_recursively(tokens: &Vec) { FolToken::TokenList(token_vec) => print_tokens_recursively(token_vec), FolToken::Function(name, args) => { print!("{name}("); - for arg in args { + for (idx, arg) in args.iter().enumerate() { print_tokens_recursively(&vec![arg.clone()]); - // todo: one more "," than needed - print!(",") + if idx < args.len() { + print!(",") + } } print!(")") } diff --git a/src-tauri/src/algorithms/fo_logic/utils.rs b/src-tauri/src/algorithms/fo_logic/utils.rs index 6ed1f61..df0726c 100644 --- a/src-tauri/src/algorithms/fo_logic/utils.rs +++ b/src-tauri/src/algorithms/fo_logic/utils.rs @@ -1,6 +1,5 @@ use crate::algorithms::fo_logic::fol_tree::{FolTreeNode, NodeType}; use crate::algorithms::fo_logic::operator_enums::*; -use crate::sketchbook::ids::VarId; use biodivine_lib_param_bn::symbolic_async_graph::{SymbolicAsyncGraph, SymbolicContext}; use biodivine_lib_param_bn::BooleanNetwork; use regex::Regex; @@ -262,9 +261,9 @@ pub fn is_update_fn_symbol(fn_symbol: &str) -> bool { /// Compute a valid name for an "anonymous update function" of the corresponding variable. /// -/// todo: does not double check if there are collisions with existing params -pub fn get_implicit_function_name(variable: &VarId) -> String { - format!("f_{}", variable.as_str()) +/// TODO: currently does not double check if there are collisions with existing params +pub fn get_implicit_function_name(variable_name: &str) -> String { + format!("f_{}", variable_name) } /// Check that extended symbolic graph's BDD supports given extra variable. 
diff --git a/src-tauri/src/analysis/analysis_state.rs b/src-tauri/src/analysis/analysis_state.rs index f12979f..2196106 100644 --- a/src-tauri/src/analysis/analysis_state.rs +++ b/src-tauri/src/analysis/analysis_state.rs @@ -101,8 +101,15 @@ impl AnalysisState { pub fn initiate_reset(&mut self) { if let Some(solver) = &self.solver { let solver: Arc> = Arc::clone(solver); + + // there are two ways to cancel the computation + // 1) when we set the communication channel (receiver_channel) to None, the solver + // recognizes that and finishes + // 2) we spawn a thread to wait to acquire the "write lock" on the solver, and send it + // the cancel flag directly + + // this corresponds to method 2, which is now just a backup mechanism for 1 tokio::spawn(async move { - // todo: currently, we wait to achieve this "write lock" until whole inference finishes... solver.write().await.cancel(); }); } diff --git a/src-tauri/src/analysis/inference_solver.rs b/src-tauri/src/analysis/inference_solver.rs index f2e38c3..f27e22a 100644 --- a/src-tauri/src/analysis/inference_solver.rs +++ b/src-tauri/src/analysis/inference_solver.rs @@ -1,10 +1,13 @@ use crate::algorithms::eval_dynamic::eval::eval_dyn_prop; use crate::algorithms::eval_dynamic::prepare_graph::prepare_graph_for_dynamic; +use crate::algorithms::eval_static::encode::*; use crate::algorithms::eval_static::eval::eval_static_prop; use crate::algorithms::eval_static::prepare_graph::prepare_graph_for_static; +use crate::algorithms::fo_logic::utils::get_implicit_function_name; use crate::analysis::analysis_results::AnalysisResults; use crate::analysis::analysis_type::AnalysisType; use crate::debug; +use crate::sketchbook::properties::static_props::StatPropertyType; use crate::sketchbook::properties::{DynProperty, StatProperty}; use crate::sketchbook::Sketch; use biodivine_lib_param_bn::symbolic_async_graph::{ @@ -275,9 +278,7 @@ impl InferenceSolver { /// The results are saved to sepcific fields of the provided solver and can be retrieved later. /// They are also returned, which is now used for logging later. /// - /// TODO: This is only a prototype, and considers just parts of the sketch that are easy to - /// process at the moment. Some parts are lost, including "dual regulations", some kinds of - /// static properties, all but generic dynamic properties. + /// TODO: Some parts (like evaluation for template dynamic properties) are still not implemented. pub async fn run_inference_async( solver: Arc>, sketch: Sketch, @@ -288,13 +289,9 @@ impl InferenceSolver { solver.check_cancellation()?; // Early check before starting } - /* Todo: Currently, we use this "write lock" to lock the solver for the whole inference. * This makes it impossible to acquire a lock for cancellation or to do intermediate result fetch. * We should make the inference modular in an async sense, so that proper cancellation is possible. * * There is currently a workaround for this - we use a channel for sending progress messages, and * we can use the (non-)existence of the channel as a way to know if the computation was cancelled. */ + // Currently, we use this "write lock" to lock the solver for the whole inference. + // This works since we don't need a lock for sending progress messages - we use a communication channel. + // The (non-)existence of the channel thus serves as a way to know whether the computation was cancelled.
let mut solver_write = solver.write().await; let results = match analysis_type { @@ -331,18 +328,106 @@ impl InferenceSolver { } /// Extract and convert relevant components from the sketch (boolean network, properties). - fn extract_inputs( + /// + /// Translates all static properties into Generic properties (by encoding the property to FOL). + /// + /// TODO: Processing for template dynamic properties is still not implemented. + fn process_inputs( sketch: Sketch, ) -> Result<(BooleanNetwork, Vec, Vec), String> { - // todo: at the moment we just use HCTL dynamic properties, and automatically generated regulation properties - let bn = sketch.model.to_bn_with_plain_regulations(None); // remove all unused function symbols, as these would cause problems later - let bn = bn.prune_unused_parameters(); + let mut bn = bn.prune_unused_parameters(); + // add expressions "f_var_N(regulator_1, ..., regulator_M)" instead of all empty updates + // this gets us rid of "implicit" update functions, and we can only eval "explicit" parameters + for var in bn.variables().clone() { + if bn.get_update_function(var).is_none() { + let var_name = bn.get_variable_name(var).clone(); + let fn_name = get_implicit_function_name(&var_name); + let inputs = bn.regulators(var); + bn.add_parameter(&fn_name, inputs.len() as u32).unwrap(); + let input_str = inputs + .iter() + .map(|v| bn.get_variable_name(*v).clone()) + .collect::>() + .join(", "); + let update_fn_str = format!("{fn_name}({input_str})"); + bn.add_string_update_function(&var_name, &update_fn_str) + .unwrap(); + } + } let mut static_props = vec![]; for (id, stat_prop) in sketch.properties.stat_props() { - static_props.push((id, stat_prop.clone())); + let name = stat_prop.get_name(); + + // currently, everything is encoded into first-order logic (into a "generic" property) + let stat_prop_processed = match stat_prop.get_prop_data() { + StatPropertyType::GenericStatProp(..) => stat_prop.clone(), + StatPropertyType::RegulationEssential(prop) + | StatPropertyType::RegulationEssentialContext(prop) => { + let input_name = prop.input.clone().unwrap(); + let target_name = prop.target.clone().unwrap(); + let mut formula = encode_regulation_essentiality( + input_name.as_str(), + target_name.as_str(), + prop.clone().value, + &bn, + ); + if let Some(context_formula) = &prop.context { + formula = encode_property_in_context(context_formula, &formula); + } + StatProperty::mk_generic(name, &formula)? + } + StatPropertyType::RegulationMonotonic(prop) + | StatPropertyType::RegulationMonotonicContext(prop) => { + let input_name = prop.input.clone().unwrap(); + let target_name = prop.target.clone().unwrap(); + let mut formula = encode_regulation_monotonicity( + input_name.as_str(), + target_name.as_str(), + prop.clone().value, + &bn, + ); + if let Some(context_formula) = &prop.context { + formula = encode_property_in_context(context_formula, &formula); + } + StatProperty::mk_generic(name, &formula)? + } + StatPropertyType::FnInputEssential(prop) + | StatPropertyType::FnInputEssentialContext(prop) => { + let fn_id = prop.target.clone().unwrap(); + let input_idx = prop.input_index.unwrap(); + let number_inputs = sketch.model.get_uninterpreted_fn_arity(&fn_id)?; + let mut formula = encode_essentiality( + number_inputs, + input_idx, + fn_id.as_str(), + prop.clone().value, + ); + if let Some(context_formula) = &prop.context { + formula = encode_property_in_context(context_formula, &formula); + } + StatProperty::mk_generic(name, &formula)? 
+ } + StatPropertyType::FnInputMonotonic(prop) + | StatPropertyType::FnInputMonotonicContext(prop) => { + let fn_id = prop.target.clone().unwrap(); + let input_idx = prop.input_index.unwrap(); + let number_inputs = sketch.model.get_uninterpreted_fn_arity(&fn_id)?; + let mut formula = encode_monotonicity( + number_inputs, + input_idx, + fn_id.as_str(), + prop.clone().value, + ); + if let Some(context_formula) = &prop.context { + formula = encode_property_in_context(context_formula, &formula); + } + StatProperty::mk_generic(name, &formula)? + } + }; + static_props.push((id, stat_prop_processed)) } let mut dynamic_props = vec![]; @@ -361,14 +446,11 @@ impl InferenceSolver { /// Evaluate previously collected static properties, and restrict the unit set of the /// graph to the set of valid colors. - /// - /// TODO: function `eval_static_prop` needs to be finished. fn eval_static(&mut self, base_var_name: &str) -> Result<(), String> { for stat_property in self.stat_props()?.clone() { self.check_cancellation()?; // check if cancellation flag was set during computation - let inferred_colors = - eval_static_prop(stat_property, self.bn()?, self.graph()?, base_var_name)?; + let inferred_colors = eval_static_prop(stat_property, self.graph()?, base_var_name)?; let colored_vertices = GraphColoredVertices::new( inferred_colors.into_bdd(), self.graph()?.symbolic_context(), @@ -405,9 +487,7 @@ impl InferenceSolver { /// Internal modular variant of the inference. You can choose which parts to select. /// For example, you can only consider static properties, only dynamic properties, or all. /// - /// TODO: This is only a prototype, and considers just parts of the sketch that are easy to - /// process at the moment. Some parts are lost, including "dual regulations", some kinds of - /// static properties, all but generic dynamic properties. + /// TODO: Some parts (like evaluation for template dynamic properties) are still not implemented. 
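Since `process_inputs` above rewrites every template static property into a generic FOL property, a rough idea of what such an encoding can look like may help. The sketch below is purely illustrative (it covers only the "input is essential" case and uses made-up FOL notation); the formulas actually produced by `encode_essentiality` and the other encoders in `eval_static::encode`, and the property's `value` handling, may differ.

```rust
/// Illustrative only: build a formula stating that input `input_idx` of the
/// uninterpreted function `fn_name` (with `arity` inputs) is essential, i.e. there
/// is a context of the remaining inputs where flipping this input changes the output.
fn encode_essentiality_sketch(arity: usize, input_idx: usize, fn_name: &str) -> String {
    // Argument list with the tested input fixed to `val` and all others left free.
    let args = |val: &str| -> String {
        (0..arity)
            .map(|i| {
                if i == input_idx {
                    val.to_string()
                } else {
                    format!("x_{i}")
                }
            })
            .collect::<Vec<_>>()
            .join(", ")
    };
    // Existentially quantify every free input variable.
    let quantifiers: String = (0..arity)
        .filter(|i| *i != input_idx)
        .map(|i| format!("\\exists x_{i}: "))
        .collect();
    format!(
        "{quantifiers}!({fn_name}({}) <=> {fn_name}({}))",
        args("0"),
        args("1")
    )
}
```

For example, `encode_essentiality_sketch(3, 1, "f")` yields `\exists x_0: \exists x_2: !(f(x_0, 0, x_2) <=> f(x_0, 1, x_2))`.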
fn run_inference_modular( &mut self, analysis_type: AnalysisType, @@ -423,7 +503,7 @@ impl InferenceSolver { /* >> STEP 1: process basic components of the sketch to be used */ // this also does few input simplifications, like filtering out unused function symbols from the BN - let (bn, static_props, dynamic_props) = Self::extract_inputs(sketch)?; + let (bn, static_props, dynamic_props) = Self::process_inputs(sketch)?; self.bn = Some(bn); self.static_props = Some(static_props); diff --git a/src-tauri/src/app/state/editor/_state_editor_session.rs b/src-tauri/src/app/state/editor/_state_editor_session.rs index b7d98f1..f452161 100644 --- a/src-tauri/src/app/state/editor/_state_editor_session.rs +++ b/src-tauri/src/app/state/editor/_state_editor_session.rs @@ -35,8 +35,9 @@ impl StackSession for EditorSession { ) -> Result<(Option, Option), DynError> { let path = message.message.path.clone(); - // if the state changed due to message processing, we'll have to reset the undo-redo stack - // do not use messages that make these changes often + // If the state changed due to message processing, we'll have to reset the undo-redo stack + // (but we do not use messages that make these changes often) + // todo: make this `mut` when we have some cases here that could mutate state let reset_stack = false; diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs index 0f6db7d..9eb2313 100644 --- a/src-tauri/src/main.rs +++ b/src-tauri/src/main.rs @@ -11,8 +11,6 @@ use aeon_sketchbook::app::{ }; use aeon_sketchbook::debug; use chrono::prelude::*; -use std::thread; -use std::time::Duration; use tauri::{command, Manager, State, Window}; #[command] @@ -55,53 +53,83 @@ fn main() { events: event.events, }; - // TODO: this part should be probably moved elsewhere, just a placeholder for now // check for "new-session" events here + // TODO: this part should be probably moved elsewhere, just a placeholder for now if action.events.len() == 1 && action.events[0].path == ["new-analysis-session"] { - // prepare (timestamped) session and window instances for AppState - let time_now = Utc::now(); - let timestamp = time_now.timestamp(); - let new_session_id = format!("analysis-{timestamp}"); - let new_window_id = format!("analysis-{timestamp}-window"); - let new_session: DynSession = Box::new(AnalysisSession::new(&new_session_id)); - state.session_created(&new_session_id, new_session); - state.window_created(&new_window_id, &new_session_id); + // This `new-analysis-session` event comes from the Editor with the sketch that will be analyzed. + // Before starting the new analysis session, we run a consistency check on that sketch. + // If the check is successful, we continue creating the session. If not, user should be + // notified about the consistency issues, and we do not create the new session. - // create a new window for the analysis session in tauri - let new_window = tauri::WindowBuilder::new( - &handle, - &new_window_id, - tauri::WindowUrl::App("src/html/analysis.html".into()), - ) - .title(format!( - "Inference Workflow (started on {})", - time_now.to_rfc2822() - )) - .build(); + // Run this event that automatically fails and returns error if sketch is not consistent + let consistency_assert_event = UserAction { + events: vec![Event::build(&["sketch", "assert_consistency"], None)] + }; + if let Err(e) = state.consume_event(&aeon, &session_id, &consistency_assert_event) { + // If sketch was inconsistent, lets first run this event that sends a summary of issues to the frontend. 
+ // This event should not fail, it only updates frontend. + let consistency_check_event = UserAction { + events: vec![Event::build(&["sketch", "check_consistency"], None)] + }; + state.consume_event(&aeon, &session_id, &consistency_check_event).unwrap(); - match new_window { - Ok(_) => debug!( - "New session `{new_session_id}` and window `{new_window_id}` created." - ), - Err(e) => panic!("Failed to create new window: {:?}", e), - } + // Now that user has all the details, lets just log the problem and send an error event to FE + debug!("Error while starting analysis workflow: `{}`.", e.to_string()); + let message = "Sketch is not consistent. See detailed summary in the 'Consistency Check' section."; + // A crude way to escape the error message and wrap it in quotes. + let json_message: String = serde_json::Value::String(message.to_string()).to_string(); + let state_change = StateChange { + events: vec![Event::build(&["error"], Some(&json_message))], + }; + let res_emit = + state.emit_to_session_windows(&aeon, &session_id, state_change); + if let Err(e) = res_emit { + panic!("Event error failed to be sent: {:?}", e); + } + } else { + // If sketch is consistent, we are ready to create the new session. - // todo: add better way - let sleep_duration = Duration::from_millis(1200); - thread::sleep(sleep_duration); + // prepare (timestamped) session and window instances for AppState + let time_now = Utc::now(); + let timestamp = time_now.timestamp(); + let new_session_id = format!("analysis-{timestamp}"); + let new_window_id = format!("analysis-{timestamp}-window"); + let new_session: DynSession = Box::new(AnalysisSession::new(&new_session_id)); + state.session_created(&new_session_id, new_session); + state.window_created(&new_window_id, &new_session_id); - // send request message "from" the new analysis session to the editor session - // asking to transfer Sketch data - let message = SessionMessage { - message: Event::build(&["send_sketch"], None), - }; - let res = - state.consume_message(&aeon, DEFAULT_SESSION_ID, &new_session_id, &message); - if let Err(e) = res { - panic!( - "Failed transferring sketch data from editor to analysis: {:?}", - e - ); + // send request message "from" the new analysis session to the editor session + // asking to transfer Sketch data + let message = SessionMessage { + message: Event::build(&["send_sketch"], None), + }; + let res = + state.consume_message(&aeon, DEFAULT_SESSION_ID, &new_session_id, &message); + if let Err(e) = res { + panic!( + "Failed transferring sketch data from editor to analysis: {:?}", + e + ); + } + + // create a new window for the analysis session in tauri + let new_window = tauri::WindowBuilder::new( + &handle, + &new_window_id, + tauri::WindowUrl::App("src/html/analysis.html".into()), + ) + .title(format!( + "Inference Workflow (started on {})", + time_now.to_rfc2822() + )) + .build(); + + match new_window { + Ok(_) => debug!( + "New session `{new_session_id}` and window `{new_window_id}` created." 
+ ), + Err(e) => panic!("Failed to create new window: {:?}", e), + } } } else { let result = state.consume_event(&aeon, &session_id, &action); diff --git a/src-tauri/src/sketchbook/_sketch/_impl_consistency.rs b/src-tauri/src/sketchbook/_sketch/_impl_consistency.rs new file mode 100644 index 0000000..28d0289 --- /dev/null +++ b/src-tauri/src/sketchbook/_sketch/_impl_consistency.rs @@ -0,0 +1,275 @@ +use crate::sketchbook::ids::{DatasetId, ObservationId, UninterpretedFnId, VarId}; +use crate::sketchbook::properties::dynamic_props::DynPropertyType; +use crate::sketchbook::properties::static_props::StatPropertyType; +use crate::sketchbook::properties::{DynProperty, FirstOrderFormula, HctlFormula, StatProperty}; +use crate::sketchbook::Sketch; + +/// Utilities to perform consistency checks. +impl Sketch { + /// Assert that the sketch is consistent, return an error otherwise. + /// See [run_consistency_check] for details on which criteria are checked. + pub fn assert_consistency(&self) -> Result<(), String> { + if self.run_consistency_check().0 { + Ok(()) + } else { + Err("Sketch is not consistent.".to_string()) + } + } + + /// General check that all components of the sketch are consistent together. + /// + /// Note that most of the general consistency (syntax of formulas, validity and + /// uniqueness of IDs, ..) is enforced automatically when editing the sketch. However, + /// some more complex details are left to be checked (explicitly or before analysis). + /// + /// This should include: + /// - check that the model is not empty + /// - check that dataset variables are valid network variables + /// - check that various template properties reference valid variables and data + /// - check that HCTL formulas only use valid variables as atomic propositions + /// - check that FOL formulas only use valid function symbols + pub fn run_consistency_check(&self) -> (bool, String) { + let mut all_consistent = true; + let mut message = String::new(); + + // we divide the code by different components to avoid replication + let component_results = vec![ + self.check_model(), + self.check_datasets(), + self.check_static(), + self.check_dynamic(), + ]; + + for (consistent, sub_message) in component_results { + if !consistent { + message += sub_message.as_str(); + message += "\n"; + all_consistent = false; + } + } + + (all_consistent, message) + } + + /// Part of the consistency check responsible for the 'model' component. + /// Returns a bool (whether the model is consistent) and a formatted message with issues. + fn check_model(&self) -> (bool, String) { + let mut consistent = true; + let mut message = String::new(); + message += "MODEL:\n"; + + if self.model.num_vars() == 0 { + consistent = false; + message += "> ISSUE: There must be at least one variable.\n"; + } + + // TODO: in the future, we can also add a check whether update fns match regulation monotonicity + + (consistent, message) + } + + /// Part of the consistency check responsible for the 'observations' (datasets) component. + /// Returns a bool (whether the datasets are consistent) and a formatted message with issues.
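For context, a minimal hypothetical caller of this new API could look as follows; only the two public methods introduced above are assumed.

```rust
use crate::sketchbook::Sketch;

// Hypothetical helper, not part of the patch: run the full check, print the
// per-component report if anything is wrong, and then fail the same way the
// strict `assert_consistency` does before an analysis session is created.
fn ensure_consistent(sketch: &Sketch) -> Result<(), String> {
    let (consistent, report) = sketch.run_consistency_check();
    if !consistent {
        eprintln!("{report}");
    }
    // Returns Err("Sketch is not consistent.") if any component reported an issue.
    sketch.assert_consistency()
}
```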
+ fn check_datasets(&self) -> (bool, String) { + let mut message = String::new(); + message += "DATASETS:\n"; + + let mut dataset_err_found = false; + for (dataset_id, dataset) in self.observations.datasets() { + // check that all dataset variables are part of the network + for var_id in dataset.variables() { + if !self.model.is_valid_var_id(var_id) { + let issue_var = + format!("Variable {} is not present in the model.", var_id.as_str()); + let issue = format!( + "> ISSUE with dataset `{}`: {issue_var}\n", + dataset_id.as_str() + ); + message += &issue; + dataset_err_found = true; + } + } + } + (!dataset_err_found, message) + } + + /// Part of the consistency check responsible for the 'static properties' component. + /// Returns a bool (whether the static properties are consistent) and a formatted message with issues. + fn check_static(&self) -> (bool, String) { + let mut message = String::new(); + message += "STATIC PROPERTIES:\n"; + + let mut stat_err_found = false; + for (prop_id, prop) in self.properties.stat_props() { + if let Err(e) = self.assert_static_prop_valid(prop) { + message = append_property_issue(&e, prop_id.as_str(), message); + stat_err_found = true; + } + } + (!stat_err_found, message) + } + + /// Part of the consistency check responsible for the 'dynamic properties' component. + /// Returns a bool (whether the dynamic properties are consistent) and a formatted message with issues. + fn check_dynamic(&self) -> (bool, String) { + let mut message = String::new(); + message += "DYNAMIC PROPERTIES:\n"; + + let mut dyn_err_found = false; + for (prop_id, prop) in self.properties.dyn_props() { + if let Err(e) = self.assert_dynamic_prop_valid(prop) { + message = append_property_issue(&e, prop_id.as_str(), message); + dyn_err_found = true; + } + } + (!dyn_err_found, message) + } + + /// Check if all fields of the static property are filled and have valid values. + /// If not, return an appropriate message.
+ fn assert_static_prop_valid(&self, prop: &StatProperty) -> Result<(), String> { + // first just check if all required fields are filled out + prop.assert_fully_filled()?; + + // now, let's validate the fields (we know the required ones are filled in) + match prop.get_prop_data() { + StatPropertyType::GenericStatProp(generic_prop) => { + FirstOrderFormula::check_syntax_with_model(&generic_prop.raw_formula, &self.model)?; + } + StatPropertyType::FnInputEssential(p) + | StatPropertyType::FnInputEssentialContext(p) => { + self.assert_fn_valid(p.target.as_ref().unwrap())?; + self.assert_index_valid(p.input_index.unwrap(), p.target.as_ref().unwrap())?; + self.assert_context_valid_or_none(p.context.as_ref())?; + } + StatPropertyType::FnInputMonotonic(p) + | StatPropertyType::FnInputMonotonicContext(p) => { + self.assert_fn_valid(p.target.as_ref().unwrap())?; + self.assert_index_valid(p.input_index.unwrap(), p.target.as_ref().unwrap())?; + self.assert_context_valid_or_none(p.context.as_ref())?; + } + StatPropertyType::RegulationEssential(p) + | StatPropertyType::RegulationEssentialContext(p) => { + self.assert_var_valid(p.target.as_ref().unwrap())?; + self.assert_var_valid(p.input.as_ref().unwrap())?; + self.assert_context_valid_or_none(p.context.as_ref())?; + } + StatPropertyType::RegulationMonotonic(p) + | StatPropertyType::RegulationMonotonicContext(p) => { + self.assert_var_valid(p.target.as_ref().unwrap())?; + self.assert_var_valid(p.input.as_ref().unwrap())?; + self.assert_context_valid_or_none(p.context.as_ref())?; + } + } + Ok(()) + } + + /// Check if all fields of the dynamic property are filled and have valid values. + /// If not, return appropriate message. + fn assert_dynamic_prop_valid(&self, prop: &DynProperty) -> Result<(), String> { + // first just check if all required fields are filled out (that is usually the dataset ID) + prop.assert_dataset_filled()?; + + // now, let's validate the fields (we know the required ones are filled in) + match prop.get_prop_data() { + DynPropertyType::GenericDynProp(generic_prop) => { + HctlFormula::check_syntax_with_model(&generic_prop.raw_formula, &self.model)?; + } + DynPropertyType::HasAttractor(p) => { + self.assert_dataset_valid(p.dataset.as_ref().unwrap())?; + self.assert_obs_valid_or_none(p.dataset.as_ref().unwrap(), p.observation.as_ref())?; + } + DynPropertyType::ExistsFixedPoint(p) => { + self.assert_dataset_valid(p.dataset.as_ref().unwrap())?; + self.assert_obs_valid_or_none(p.dataset.as_ref().unwrap(), p.observation.as_ref())?; + } + DynPropertyType::ExistsTrajectory(p) => { + self.assert_dataset_valid(p.dataset.as_ref().unwrap())?; + } + DynPropertyType::ExistsTrapSpace(p) => { + self.assert_dataset_valid(p.dataset.as_ref().unwrap())?; + self.assert_obs_valid_or_none(p.dataset.as_ref().unwrap(), p.observation.as_ref())?; + } + DynPropertyType::AttractorCount(_) => {} // no fields that can be invalid + } + Ok(()) + } + + /// Check that variable is valid in a model. If not, return error with a proper message. + fn assert_var_valid(&self, var_id: &VarId) -> Result<(), String> { + if self.model.is_valid_var_id(var_id) { + Ok(()) + } else { + let msg = format!("Variable `{var_id}` is not a valid variable in the model."); + Err(msg) + } + } + + /// Check that function is valid in a model. If not, return error with a proper message. 
+ fn assert_fn_valid(&self, fn_id: &UninterpretedFnId) -> Result<(), String> { + if self.model.is_valid_uninterpreted_fn_id(fn_id) { + Ok(()) + } else { + let msg = format!("Function `{fn_id}` is not a valid function in the model."); + Err(msg) + } + } + + /// Check that input index of uninterpreted function is in range (smaller than the arity). + /// If not, return error with a proper message. + fn assert_index_valid(&self, index: usize, fn_id: &UninterpretedFnId) -> Result<(), String> { + let arity = self.model.get_uninterpreted_fn_arity(fn_id)?; + if arity <= index { + let msg = + format!("Function `{fn_id}` has arity {arity}, input index {index} is invalid."); + return Err(msg); + } + Ok(()) + } + + /// Check that context formula is valid. If not, return error with a proper message. + /// + /// The context can also be None, which is valid. + fn assert_context_valid_or_none(&self, context: Option<&String>) -> Result<(), String> { + if let Some(context_str) = context { + if let Err(e) = FirstOrderFormula::check_syntax_with_model(context_str, &self.model) { + let msg = format!("Invalid context formula. {e}"); + return Err(msg); + } + } + Ok(()) + } + + /// Check that dataset is valid. If not, return error with a proper message. + fn assert_dataset_valid(&self, dataset_id: &DatasetId) -> Result<(), String> { + if self.observations.is_valid_dataset_id(dataset_id) { + Ok(()) + } else { + Err(format!("Dataset `{dataset_id}` is not a valid dataset.")) + } + } + + /// Check whether observation is valid in a dataset. If not, return error with a proper message. + /// If observation is None, that is also fine. + fn assert_obs_valid_or_none( + &self, + dataset_id: &DatasetId, + obs_id: Option<&ObservationId>, + ) -> Result<(), String> { + if let Some(obs) = obs_id { + let dataset = self.observations.get_dataset(dataset_id)?; + if !dataset.is_valid_observation(obs) { + let msg = format!("Observation `{obs}` is not valid in dataset `{dataset_id}`."); + return Err(msg); + } + } + Ok(()) + } +} + +/// **(internal)** Simple internal utility to append issue message regarding a particular property. +fn append_property_issue(description: &str, prop_id: &str, mut log: String) -> String { + let issue = format!("> ISSUE with property `{}`: {description}\n", prop_id); + log += &issue; + log +} diff --git a/src-tauri/src/sketchbook/_sketch/_impl_import.rs b/src-tauri/src/sketchbook/_sketch/_impl_import.rs index 4fb5924..d6fc313 100644 --- a/src-tauri/src/sketchbook/_sketch/_impl_import.rs +++ b/src-tauri/src/sketchbook/_sketch/_impl_import.rs @@ -11,7 +11,7 @@ type NamedProperties = Vec<(String, String)>; impl Sketch { /// Create sketch instance from a AEON model format. /// - // TODO: aeon format currently does not support template properties (and datasets) + // TODO: aeon format currently does not support template properties and datasets. 
pub fn from_aeon(aeon_str: &str) -> Result { let mut sketch = Sketch::default(); diff --git a/src-tauri/src/sketchbook/_sketch/_impl_session_state.rs b/src-tauri/src/sketchbook/_sketch/_impl_session_state.rs index f5f286d..108b40c 100644 --- a/src-tauri/src/sketchbook/_sketch/_impl_session_state.rs +++ b/src-tauri/src/sketchbook/_sketch/_impl_session_state.rs @@ -56,7 +56,7 @@ impl SessionState for Sketch { file.read_to_string(&mut contents)?; // parse the AEON format - // TODO: aeon format currently does not support template properties (and datasets) + // TODO: aeon format currently does not support template properties and datasets let new_sketch = Sketch::from_aeon(&contents)?; self.modify_from_sketch(&new_sketch); @@ -70,7 +70,7 @@ impl SessionState for Sketch { } else if Self::starts_with("check_consistency", at_path).is_some() { let (success, message) = self.run_consistency_check(); let results = if success { - format!("Seems there are no issues with the sketch!\n\n{message}") + "No issues with the sketch were discovered!".to_string() } else { format!("There are issues with the sketch:\n\n{message}") }; @@ -82,6 +82,10 @@ impl SessionState for Sketch { state_change, reset: false, }) + } else if Self::starts_with("assert_consistency", at_path).is_some() { + // this is a "synthetic" event that either returns an error, or Consumed::NoChange + self.assert_consistency()?; + Ok(Consumed::NoChange) } else { Self::invalid_path_error_generic(at_path) } diff --git a/src-tauri/src/sketchbook/_sketch/_impl_sketch.rs b/src-tauri/src/sketchbook/_sketch/_impl_sketch.rs index c3fa4c6..4508f76 100644 --- a/src-tauri/src/sketchbook/_sketch/_impl_sketch.rs +++ b/src-tauri/src/sketchbook/_sketch/_impl_sketch.rs @@ -1,13 +1,10 @@ use crate::sketchbook::data_structs::SketchData; use crate::sketchbook::model::ModelState; use crate::sketchbook::observations::{Dataset, ObservationManager}; -use crate::sketchbook::properties::dynamic_props::DynPropertyType; -use crate::sketchbook::properties::static_props::StatPropertyType; -use crate::sketchbook::properties::{ - DynProperty, FirstOrderFormula, HctlFormula, PropertyManager, StatProperty, -}; +use crate::sketchbook::properties::{DynProperty, PropertyManager, StatProperty}; use crate::sketchbook::Sketch; +/// Utility functions for creating or modifying sketch instances. impl Sketch { /// Parse and validate all components of `Sketch` from a corresponding `SketchData` instance. pub fn components_from_sketch_data( @@ -87,73 +84,4 @@ impl Sketch { self.observations = ObservationManager::default(); self.properties = PropertyManager::default(); } - - /// General check that all components of the sketch are consistent together. 
- /// This should include: - /// - check that dataset variables are valid network variables - /// - check that template properties only use valid variables and data - /// - check that HCTL properties only use valid variables as atomic propositions - /// - check that FOL properties only use valid function symbols - pub fn run_consistency_check(&self) -> (bool, String) { - let mut all_consitent = true; - let mut message = String::new(); - message += "MODEL:\n"; - if self.model.num_vars() == 0 { - all_consitent = false; - message += "> ISSUE: There must be at least one variable.\n"; - } - // todo - message += "(this part is not fully implemented yet)\n\n"; - - message += "DATASET:\n"; - // todo - message += "(this part is not fully implemented yet)\n\n"; - - message += "STATIC PROPERTIES:\n"; - let mut stat_err_found = false; - let mut at_least_one_stat_generic = false; - for (prop_id, prop) in self.properties.stat_props() { - if let StatPropertyType::GenericStatProp(generic_prop) = prop.get_prop_data() { - at_least_one_stat_generic = true; - if let Err(e) = FirstOrderFormula::check_syntax_with_model( - &generic_prop.raw_formula, - &self.model, - ) { - let issue = format!("> ISSUE with property `{}`: {e}\n", prop_id.as_str()); - message += &issue; - stat_err_found = true; - } - } - // TODO: rest is not implemented yet - } - if at_least_one_stat_generic && !stat_err_found { - message += "> No issues with Generic static properties found.\n"; - } - message += "(this part is not fully implemented yet)\n\n"; - all_consitent = all_consitent && !stat_err_found; - - message += "DYNAMIC PROPERTIES:\n"; - let mut dyn_err_found = false; - let mut at_least_one_dyn_generic = false; - for (prop_id, prop) in self.properties.dyn_props() { - if let DynPropertyType::GenericDynProp(generic_prop) = prop.get_prop_data() { - at_least_one_dyn_generic = true; - if let Err(e) = - HctlFormula::check_syntax_with_model(&generic_prop.raw_formula, &self.model) - { - let issue = format!("> ISSUE with property `{}`: {e}\n", prop_id.as_str()); - message += &issue; - dyn_err_found = true; - } - } - // TODO: rest is not implemented yet - } - if at_least_one_dyn_generic && !dyn_err_found { - message += "> No issues with Generic dynamic properties found.\n"; - } - message += "(this part is not fully implemented yet)\n\n"; - all_consitent = all_consitent && !dyn_err_found; - - (all_consitent, message) - } } diff --git a/src-tauri/src/sketchbook/_sketch/mod.rs b/src-tauri/src/sketchbook/_sketch/mod.rs index 7cbe6cf..f14804f 100644 --- a/src-tauri/src/sketchbook/_sketch/mod.rs +++ b/src-tauri/src/sketchbook/_sketch/mod.rs @@ -4,6 +4,8 @@ use crate::sketchbook::properties::PropertyManager; use crate::sketchbook::{JsonSerde, Manager}; use serde::{Deserialize, Serialize}; +/// **(internal)** Utilities to check consistency of `Sketch` instances. +mod _impl_consistency; /// **(internal)** Exporting sketch in various formats. mod _impl_export; /// **(internal)** Importing sketch in various formats. diff --git a/src-tauri/src/sketchbook/_tests_events/_model.rs b/src-tauri/src/sketchbook/_tests_events/_model.rs index ae7427f..182dd40 100644 --- a/src-tauri/src/sketchbook/_tests_events/_model.rs +++ b/src-tauri/src/sketchbook/_tests_events/_model.rs @@ -9,7 +9,7 @@ use crate::sketchbook::JsonSerde; #[test] /// Test adding variable via events. 
-/// todo - test more complex variant +/// TODO - add tests for more complex scenarios fn test_add_var() { let variables = vec![("a", "a")]; let mut model = ModelState::new_from_vars(variables).unwrap(); @@ -172,7 +172,7 @@ fn test_invalid_var_events() { #[test] /// Test adding regulation via (raw) event. -/// todo: add complex version that adds regulation which requires adding static properties +/// TODO: add tests for more complex scenarios which require changes in static properties fn test_add_reg_simple() { let variables = vec![("a", "a"), ("b", "b")]; let mut model = ModelState::new_from_vars(variables).unwrap(); @@ -192,7 +192,7 @@ fn test_add_reg_simple() { #[test] /// Test changing regulation's monotonicity and essentiality via event. -/// todo: add complex version which requires changes in static properties +/// TODO: add tests for more complex scenarios which require changes in static properties fn test_change_reg_sign_essentiality() { let variables = vec![("a", "a_name"), ("b", "b_name")]; let mut model = ModelState::new_from_vars(variables).unwrap(); @@ -225,7 +225,7 @@ fn test_change_reg_sign_essentiality() { #[test] /// Test removing regulation via (raw) event. -/// todo: add complex version that removes regulation which requires removing static properties +/// TODO: add tests for more complex scenarios which require changes in static properties fn test_remove_reg_simple() { let variables = vec![("a", "a_name"), ("b", "b_name")]; let mut model = ModelState::new_from_vars(variables).unwrap(); diff --git a/src-tauri/src/sketchbook/bn_utils.rs b/src-tauri/src/sketchbook/bn_utils.rs index df58feb..61f9815 100644 --- a/src-tauri/src/sketchbook/bn_utils.rs +++ b/src-tauri/src/sketchbook/bn_utils.rs @@ -1,8 +1,8 @@ use crate::sketchbook::model::{Essentiality, Monotonicity}; use biodivine_lib_param_bn::Monotonicity as Lib_Pbn_Monotonicity; -/// **(internal)** Static utility method to convert regulation sign given by `Monotonicity` -/// used by `lib_param_bn` into the type `Monotonicity` used here. +/// Utility to convert monotonicity enum used by `lib_param_bn` into the type used here. +/// /// TODO: note that `lib-param-bn` currently cannot express `Dual` variant of `Monotonicity`. pub fn sign_from_monotonicity(monotonicity: Option) -> Monotonicity { match monotonicity { @@ -14,31 +14,36 @@ pub fn sign_from_monotonicity(monotonicity: Option) -> Mon } } -/// **(internal)** Static utility method to convert regulation sign from the type -/// `Monotonicity` used here into the type `Monotonicity` used in `lib_param_bn`. -/// TODO: note that `lib-param-bn` currently cannot express `Dual` variant of `Monotonicity` and `Unknown` is used instead. +/// Utility to convert regulation sign from the enum type used in this crate into the type used in `lib_param_bn`. +/// +/// TODO: note that `lib-param-bn` currently cannot express `Dual` variant of `Monotonicity`. We convert it +/// to `Unknown` instead. pub fn sign_to_monotonicity(regulation_sign: &Monotonicity) -> Option { match regulation_sign { Monotonicity::Activation => Some(Lib_Pbn_Monotonicity::Activation), Monotonicity::Inhibition => Some(Lib_Pbn_Monotonicity::Inhibition), Monotonicity::Unknown => None, - // todo: fix + // TODO: maybe put "unimplemented" here? Monotonicity::Dual => None, } } -/// **(internal)** Static utility method to convert `Essentiality` from boolean. -/// TODO: note that `lib-param-bn` currently cannot distinguish between `False` and `Unknown` variants of `Essentiality`.
+/// Utility method to convert `Essentiality` from boolean. +/// +/// TODO: note that `lib-param-bn` currently cannot distinguish between `False` and `Unknown` +/// variants of `Essentiality`. In general, these are both represented by "false" in `lib-param-bn`. pub fn essentiality_from_bool(essentiality: bool) -> Essentiality { match essentiality { true => Essentiality::True, - // todo: fix, this is how it works now in `lib-param-bn` + // this is how it currently works now in `lib-param-bn` false => Essentiality::Unknown, } } -/// **(internal)** Static utility method to convert `Essentiality` to boolean. -/// TODO: note that `lib-param-bn` currently cannot distinguish between `False` and `Unknown` variants of `Essentiality`. +/// Utility method to convert `Essentiality` into boolean. +/// +/// TODO: note that `lib-param-bn` currently cannot distinguish between `False` and `Unknown` +/// variants of `Essentiality`. In general, these are both represented by "false" in `lib-param-bn`. pub fn essentiality_to_bool(essentiality: Essentiality) -> bool { match essentiality { Essentiality::True => true, diff --git a/src-tauri/src/sketchbook/layout/_node_layout.rs b/src-tauri/src/sketchbook/layout/_node_layout.rs index bdb5d33..785ce13 100644 --- a/src-tauri/src/sketchbook/layout/_node_layout.rs +++ b/src-tauri/src/sketchbook/layout/_node_layout.rs @@ -9,7 +9,7 @@ use std::fmt::{Display, Error, Formatter}; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct LayoutNode { position: NodePosition, - // todo: add other data (visibility, colour, shape, ...) + // TODO: add other data in future (visibility, colour, shape, ...) } impl LayoutNode { diff --git a/src-tauri/src/sketchbook/model/_model_state/_impl_editing.rs b/src-tauri/src/sketchbook/model/_model_state/_impl_editing.rs index c814dc8..9e17879 100644 --- a/src-tauri/src/sketchbook/model/_model_state/_impl_editing.rs +++ b/src-tauri/src/sketchbook/model/_model_state/_impl_editing.rs @@ -33,7 +33,6 @@ impl ModelState { /// Create a new `ModelState` given a corresponding `ModelData` instance. /// - /// It is not that efficient currently, but should result in correct/consistent model. /// TODO: try to rewrite more efficiently without compromising the correctness. pub fn new_from_model_data(model_data: &ModelData) -> Result { // start with variables and plain function symbols (so that they can be used in expressions later) @@ -597,7 +596,7 @@ impl ModelState { } // substitute id for this uninterpreted fn in all uninterpreted functions' expressions - // todo - inefficient + // TODO: this is a bit inefficient for fn_id in self.uninterpreted_fns.clone().keys() { let uninterpreted_fn = self.uninterpreted_fns.remove(fn_id).unwrap(); let new_uninterpreted_fn = UninterpretedFn::with_substituted_fn_symbol( diff --git a/src-tauri/src/sketchbook/model/_model_state/_impl_observing.rs b/src-tauri/src/sketchbook/model/_model_state/_impl_observing.rs index 8651916..d893a4b 100644 --- a/src-tauri/src/sketchbook/model/_model_state/_impl_observing.rs +++ b/src-tauri/src/sketchbook/model/_model_state/_impl_observing.rs @@ -148,6 +148,16 @@ impl ModelState { Ok(uninterpreted_fn) } + /// Return arity of a `UninterpretedFn` corresponding to a given `UninterpretedFnId`. + /// + /// Return `Err` if no such uninterpreted fn exists (the ID is invalid in this context). 
+ pub fn get_uninterpreted_fn_arity(&self, fn_id: &UninterpretedFnId) -> Result { + let uninterpreted_fn = self.uninterpreted_fns.get(fn_id).ok_or(format!( + "UninterpretedFn with ID {fn_id} does not exist in this model." + ))?; + Ok(uninterpreted_fn.get_arity()) + } + /// Shortcut to return a name of the variable corresponding to a given `VarId`. /// /// Return `Err` if such variable does not exist (the ID is invalid in this context). diff --git a/src-tauri/src/sketchbook/model/_model_state/_impl_session_state/_events_layout.rs b/src-tauri/src/sketchbook/model/_model_state/_impl_session_state/_events_layout.rs index e6864de..e6f6124 100644 --- a/src-tauri/src/sketchbook/model/_model_state/_impl_session_state/_events_layout.rs +++ b/src-tauri/src/sketchbook/model/_model_state/_impl_session_state/_events_layout.rs @@ -103,7 +103,7 @@ impl ModelState { self.remove_layout(&layout_id)?; let state_change = mk_model_state_change(&["layout", "remove"], &layout_data); - // todo make reversible in the future? + // TODO: make this reversible in the future? Ok(Consumed::Irreversible { state_change, reset: true, diff --git a/src-tauri/src/sketchbook/model/_model_state/_impl_session_state/_events_uninterpreted_fns.rs b/src-tauri/src/sketchbook/model/_model_state/_impl_session_state/_events_uninterpreted_fns.rs index 61384ad..9b2e752 100644 --- a/src-tauri/src/sketchbook/model/_model_state/_impl_session_state/_events_uninterpreted_fns.rs +++ b/src-tauri/src/sketchbook/model/_model_state/_impl_session_state/_events_uninterpreted_fns.rs @@ -141,7 +141,7 @@ impl ModelState { } // TODO: all changes to update/uninterpreted functions (where this function symbol - // TODO: appears) must be propagated to the frontend + // appears) must be propagated to the frontend // perform the event, prepare the state-change variant (move id from path to payload) self.set_uninterpreted_fn_id_by_str(fn_id.as_str(), new_id.as_str())?; diff --git a/src-tauri/src/sketchbook/model/_model_state/_impl_session_state/_events_variables.rs b/src-tauri/src/sketchbook/model/_model_state/_impl_session_state/_events_variables.rs index 357811d..9ee5744 100644 --- a/src-tauri/src/sketchbook/model/_model_state/_impl_session_state/_events_variables.rs +++ b/src-tauri/src/sketchbook/model/_model_state/_impl_session_state/_events_variables.rs @@ -250,8 +250,8 @@ impl ModelState { } // TODO: all changes to update functions (where this variable appears) must be - // TODO: propagated to the frontend -- for now, frontend just refreshes the content - // TODO: afterwards, but could probably be avoided... + // propagated to the frontend -- for now, frontend just refreshes the content + // afterwards, but that could probably be avoided... 
// perform the event, prepare the state-change variant (move id from path to payload) self.set_var_id_by_str(var_id.as_str(), new_id.as_str())?; diff --git a/src-tauri/src/sketchbook/model/_variable.rs b/src-tauri/src/sketchbook/model/_variable.rs index 19fe84f..63142f5 100644 --- a/src-tauri/src/sketchbook/model/_variable.rs +++ b/src-tauri/src/sketchbook/model/_variable.rs @@ -8,7 +8,7 @@ use std::fmt::{Display, Error, Formatter}; #[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Serialize, Deserialize)] pub struct Variable { name: String, - // todo: add compartment and more + // TODO: add compartments in the future } impl Variable { diff --git a/src-tauri/src/sketchbook/properties/dynamic_props/_dynamic_property.rs b/src-tauri/src/sketchbook/properties/dynamic_props/_dynamic_property.rs index 3ac295b..a2c2a9b 100644 --- a/src-tauri/src/sketchbook/properties/dynamic_props/_dynamic_property.rs +++ b/src-tauri/src/sketchbook/properties/dynamic_props/_dynamic_property.rs @@ -283,4 +283,40 @@ impl DynProperty { pub fn get_prop_data(&self) -> &DynPropertyType { &self.variant } + + /// Check that the property has all required fields filled out. That is just the `dataset` + /// in most cases at the moment. + /// If any of the required fields is set to None, return an error. + pub fn assert_dataset_filled(&self) -> Result<(), String> { + let missing_field_msg = "One of the required fields is not filled."; + + match &self.variant { + DynPropertyType::GenericDynProp(_) => {} // no fields that can be None + DynPropertyType::AttractorCount(_) => {} // no fields that can be None + DynPropertyType::HasAttractor(p) => { + // only dataset has to be filled, observation ID is optional + if p.dataset.is_none() { + return Err(missing_field_msg.to_string()); + } + } + DynPropertyType::ExistsFixedPoint(p) => { + // only dataset has to be filled, observation ID is optional + if p.dataset.is_none() { + return Err(missing_field_msg.to_string()); + } + } + DynPropertyType::ExistsTrajectory(p) => { + if p.dataset.is_none() { + return Err(missing_field_msg.to_string()); + } + } + DynPropertyType::ExistsTrapSpace(p) => { + // only dataset has to be filled, observation ID is optional + if p.dataset.is_none() { + return Err(missing_field_msg.to_string()); + } + } + } + Ok(()) + } } diff --git a/src-tauri/src/sketchbook/properties/dynamic_props/_hctl_formula.rs b/src-tauri/src/sketchbook/properties/dynamic_props/_hctl_formula.rs index c384e29..87996af 100644 --- a/src-tauri/src/sketchbook/properties/dynamic_props/_hctl_formula.rs +++ b/src-tauri/src/sketchbook/properties/dynamic_props/_hctl_formula.rs @@ -1,6 +1,4 @@ use crate::sketchbook::model::ModelState; -use crate::sketchbook::observations::{Dataset, Observation}; -use crate::sketchbook::properties::dynamic_props::_mk_hctl_formulas::*; use biodivine_hctl_model_checker::preprocessing::hctl_tree::HctlTreeNode; use biodivine_hctl_model_checker::preprocessing::parser::{ parse_and_minimize_hctl_formula, parse_hctl_formula, @@ -52,39 +50,6 @@ impl HctlFormula { tree: parse_hctl_formula(formula)?, }) } - - /// Encode an observation by a (propositional) formula depicting the corresponding state/sub-space. - /// The observation's binary values are used to create a conjunction of literals. - /// The `var_names` are used as propositions names in the formula.
- pub fn encode_observation( - obs: &Observation, - var_names: &[String], - ) -> Result { - let formula = encode_observation(obs, var_names)?; - HctlFormula::try_from_str(&formula) - } - - /// Encode each of the several observations, one by one. - /// For details, see [Self::encode_observation]. - pub fn encode_multiple_observations( - observations: &[Observation], - var_names: &[String], - ) -> Result, String> { - let formulae = encode_multiple_observations(observations, var_names)?; - formulae - .iter() - .map(|f| HctlFormula::try_from_str(f)) - .collect::, String>>() - } - - /// Encode a dataset of observations as a single HCTL formula. The particular formula - /// template is chosen depending on the type of data (attractor data, time-series, ...). - /// - /// Only data with their type specified can be encoded. - pub fn try_encode_dataset_hctl(dataset: &Dataset) -> Result { - let formula = encode_dataset_hctl(dataset)?; - HctlFormula::try_from_str(&formula) - } } /// Editing HCTL formulas. @@ -98,10 +63,15 @@ impl HctlFormula { /// Observing HCTL formulas. impl HctlFormula { - /// Str reference version of the first-order formula. + /// Reference to a string form of the HCTL formula. pub fn as_str(&self) -> &str { &self.tree.formula_str } + + /// Reference to a syntax tree of the HCTL formula. + pub fn tree(&self) -> &HctlTreeNode { + &self.tree + } } /// Static methods (to check validity of formula strings). diff --git a/src-tauri/src/sketchbook/properties/dynamic_props/mod.rs b/src-tauri/src/sketchbook/properties/dynamic_props/mod.rs index 09c4c7d..835ad5c 100644 --- a/src-tauri/src/sketchbook/properties/dynamic_props/mod.rs +++ b/src-tauri/src/sketchbook/properties/dynamic_props/mod.rs @@ -5,10 +5,6 @@ mod _hctl_formula; /// **(internal)** Variants of dynamic properties. mod _property_types; -/// **(internal)** Utility functions for automatically generating HCTL formulae. -#[allow(dead_code)] -mod _mk_hctl_formulas; - pub use _dynamic_property::DynProperty; pub use _hctl_formula::HctlFormula; pub use _property_types::*; diff --git a/src-tauri/src/sketchbook/properties/static_props/_first_order_formula.rs b/src-tauri/src/sketchbook/properties/static_props/_first_order_formula.rs index 8a7e75b..d734706 100644 --- a/src-tauri/src/sketchbook/properties/static_props/_first_order_formula.rs +++ b/src-tauri/src/sketchbook/properties/static_props/_first_order_formula.rs @@ -1,5 +1,5 @@ use crate::algorithms::fo_logic::fol_tree::FolTreeNode; -use crate::algorithms::fo_logic::parser::parse_fol_formula; +use crate::algorithms::fo_logic::parser::{parse_and_minimize_fol_formula, parse_fol_formula}; use crate::algorithms::fo_logic::utils::*; use crate::sketchbook::model::ModelState; use biodivine_lib_param_bn::symbolic_async_graph::SymbolicContext; @@ -62,22 +62,23 @@ impl FirstOrderFormula { /// Observing first-order formulas. impl FirstOrderFormula { - /// Str reference version of the first-order formula. + /// Reference to a string form of the FOL formula. pub fn as_str(&self) -> &str { &self.tree.formula_str } + + /// Reference to a syntax tree of the first-order formula. + pub fn tree(&self) -> &FolTreeNode { + &self.tree + } } /// Static methods (to check validity of formula strings). impl FirstOrderFormula { /// Check if the formula is correctly formed based on predefined FOL syntactic rules. 
pub fn check_pure_syntax(formula: &str) -> Result<(), String> { - let res = parse_fol_formula(formula); - if res.is_ok() { - Ok(()) - } else { - Err(res.err().unwrap()) - } + // we have to provide some placeholder name (for minimization), but it does not matter here + parse_and_minimize_fol_formula(formula, "PLACEHOLDER").map(|_| ()) } /// Check if the formula is correctly formed based on predefined FO syntactic rules, and also @@ -86,7 +87,9 @@ impl FirstOrderFormula { pub fn check_syntax_with_model(formula: &str, model: &ModelState) -> Result<(), String> { let bn = model.to_bn(); let ctx = SymbolicContext::new(&bn)?; - let tree = parse_fol_formula(formula)?; + + // we have to provide some placeholder name (for minimization), but it does not matter here + let tree = parse_and_minimize_fol_formula(formula, "PLACEHOLDER")?; // check if all functions valid let function_symbols = collect_unique_fn_symbols(&tree)?; diff --git a/src-tauri/src/sketchbook/properties/static_props/_static_property.rs b/src-tauri/src/sketchbook/properties/static_props/_static_property.rs index 22537f0..5472921 100644 --- a/src-tauri/src/sketchbook/properties/static_props/_static_property.rs +++ b/src-tauri/src/sketchbook/properties/static_props/_static_property.rs @@ -237,7 +237,7 @@ impl StatProperty { None, None, Essentiality::Unknown, - String::new(), + "true".to_string(), ) .unwrap() } @@ -257,7 +257,7 @@ impl StatProperty { None, None, Monotonicity::Unknown, - String::new(), + "true".to_string(), ) .unwrap() } @@ -282,7 +282,7 @@ impl StatProperty { None, None, Essentiality::Unknown, - String::new(), + "true".to_string(), ) .unwrap() } @@ -307,15 +307,12 @@ impl StatProperty { None, None, Monotonicity::Unknown, - String::new(), + "true".to_string(), ) .unwrap() } } -/// Editing static properties. -impl StatProperty {} - /// Editing static properties. impl StatProperty { /// Set property's name. @@ -477,4 +474,39 @@ impl StatProperty { pub fn get_prop_data(&self) -> &StatPropertyType { &self.variant } + + /// Check that the property has all required fields filled out. + /// If any of the required fields is set to None, return an error.
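Relating back to the `check_pure_syntax` / `check_syntax_with_model` changes at the start of this hunk, a hypothetical caller of the two validation entry points might look like this (it assumes only a `ModelState` value and a raw formula string coming from the UI):

```rust
use crate::sketchbook::model::ModelState;
use crate::sketchbook::properties::FirstOrderFormula;

// Hypothetical helper, not part of the patch.
fn validate_fol_input(raw_formula: &str, model: &ModelState) -> Result<(), String> {
    // Syntax-only check (no model needed), useful e.g. while the user is still typing.
    FirstOrderFormula::check_pure_syntax(raw_formula)?;
    // Full check against the model, which also verifies the function symbols used.
    FirstOrderFormula::check_syntax_with_model(raw_formula, model)
}
```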
+ pub fn assert_fully_filled(&self) -> Result<(), String> { + let missing_field_msg = "One of the required fields is not filled."; + + match &self.variant { + StatPropertyType::GenericStatProp(_) => {} // no fields that can be None + StatPropertyType::FnInputEssential(p) + | StatPropertyType::FnInputEssentialContext(p) => { + if p.input_index.is_none() || p.target.is_none() { + return Err(missing_field_msg.to_string()); + } + } + StatPropertyType::FnInputMonotonic(p) + | StatPropertyType::FnInputMonotonicContext(p) => { + if p.input_index.is_none() || p.target.is_none() { + return Err(missing_field_msg.to_string()); + } + } + StatPropertyType::RegulationEssential(p) + | StatPropertyType::RegulationEssentialContext(p) => { + if p.input.is_none() || p.target.is_none() { + return Err(missing_field_msg.to_string()); + } + } + StatPropertyType::RegulationMonotonic(p) + | StatPropertyType::RegulationMonotonicContext(p) => { + if p.input.is_none() || p.target.is_none() { + return Err(missing_field_msg.to_string()); + } + } + } + Ok(()) + } } diff --git a/src/html/component-editor/properties-editor/properties-editor.ts b/src/html/component-editor/properties-editor/properties-editor.ts index f452c6b..845b329 100644 --- a/src/html/component-editor/properties-editor/properties-editor.ts +++ b/src/html/component-editor/properties-editor/properties-editor.ts @@ -32,6 +32,8 @@ export default class PropertiesEditor extends LitElement { @query('#add-static-property-button') declare addStaticPropertyElement: HTMLElement @state() addDynamicMenuVisible = false @state() addStaticMenuVisible = false + // visibility of automatically generated regulation properties + @state() showRegulationProperties = true; addDynamicPropertyMenu: IAddPropertyItem[] = [ { @@ -287,6 +289,11 @@ export default class PropertiesEditor extends LitElement { } } + toggleRegulationPropertiesVisibility(): void { + this.showRegulationProperties = !this.showRegulationProperties; + } + + render (): TemplateResult { return html` +
+ +
${this.contentData?.staticProperties.length === 0 ? html`
No static properties defined
` : ''}
${map(this.contentData.staticProperties, (prop, index) => { @@ -339,11 +351,16 @@ export default class PropertiesEditor extends LitElement { .property=${prop}> ` case StaticPropertyType.FunctionInputEssential: - case StaticPropertyType.VariableRegulationEssential: return html` ` + case StaticPropertyType.VariableRegulationEssential: + // Only render if showRegulationProperties is true + return this.showRegulationProperties ? html` + + ` : '' case StaticPropertyType.FunctionInputEssentialWithCondition: case StaticPropertyType.VariableRegulationEssentialWithCondition: return html` @@ -352,11 +369,16 @@ export default class PropertiesEditor extends LitElement { .property=${prop}> ` case StaticPropertyType.FunctionInputMonotonic: - case StaticPropertyType.VariableRegulationMonotonic: return html` ` + case StaticPropertyType.VariableRegulationMonotonic: + // Only render if showRegulationProperties is true + return this.showRegulationProperties ? html` + + ` : '' case StaticPropertyType.FunctionInputMonotonicWithCondition: case StaticPropertyType.VariableRegulationMonotonicWithCondition: return html`