stash

commit 22ae1bed2e (parent 1f380c8619)

38 changed files with 819 additions and 38 deletions
@@ -1,6 +1,6 @@
 FROM sjkillen/clyngor

-RUN apt-get -qq update; apt-get -qq install sudo
+RUN apt-get -qq update; apt-get -qq install sudo default-jdk make

 ARG USERNAME=asp
 ARG USER_UID=1000
.gitignore (vendored) | 2
@@ -1 +1,3 @@
 notes
+antlr4.jar
+logic/.antlr
.vscode/launch.json (vendored) | 2
@@ -9,7 +9,7 @@
             "type": "python",
             "request": "launch",
             "program": "${file}",
-            "args": ["programs/a.lp"],
+            "args": ["programs/c.lp"],
             "console": "integratedTerminal",
             "justMyCode": true
         }
NOTES.md | 14
@@ -1,4 +1,16 @@
 # Technical issues
 - Clingo may or may not choose to propagate DL atoms before the ontology tells it to.
 - Symbols for atoms are not always present in the control object (E.g. a rule `a :- a`)
 Current workaround is to use signature (which don't get removed). This may have implications about what can be grounded.
+
+- Minimality checking
+- The solver should never "decide" to propagate theory atoms, but it does
+This might be blockable with tagged clauses, but since the solver unifies solver literals with their theory literals, this may block the regular atoms sometimes.
+
+
+- CLingo unifying the atoms is a big problem
+
+
+- If theory atoms are false, then it didn't come from ontology
+Maybe this will work:
+if the final model is not minimal, then it can't have come from the ontology
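The "tagged clauses" idea in the notes above can be sketched as a propagator that vetoes theory literals the solver assigned by its own decision. This is an illustration only (the class name and the veto policy are made up here, not part of this commit), and, as the note warns, because Clingo can unify a theory atom's solver literal with that of a regular atom, the same clause may end up blocking the regular atom as well.

import clingo

class BlockDecidedTheoryAtoms:
    """Sketch: veto solver *decisions* on &o{...} theory literals via tagged clauses."""

    def init(self, init: clingo.PropagateInit):
        # Remember the solver literals of all theory atoms and watch them.
        self.theory_lits = {init.solver_literal(a.literal) for a in init.theory_atoms}
        for lit in self.theory_lits:
            init.add_watch(lit)

    def propagate(self, control: clingo.PropagateControl, changes):
        a = control.assignment
        for lit in changes:
            # React only when the literal is the decision literal of the current level,
            # i.e. the solver "decided" the theory atom rather than deriving it.
            if lit in self.theory_lits and a.decision_level > 0 and a.decision(a.decision_level) == lit:
                # tag=True keeps the clause out of the problem permanently: tagged
                # clauses are dropped again when the propagator is re-initialised.
                if not control.add_clause([-lit], tag=True) or not control.propagate():
                    return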
(changed binary file — not shown)
example.py | 21
@@ -1,30 +1,33 @@
-from mimetypes import init
 import clingo

-PROGRAM_A = """
+PROGRAM = """
 #theory o {
     kterm {- : 0, unary };
     &o/0 : kterm, any
 }.
-a, b.
-a :- &o{a}.
-b :- &o{b}.
+
+x :- &o{false}.
+x :- &o{x}.
+y :- &o{y}.
 """

 class CheckAtoms:
     def init(self, init: clingo.PropagateInit):
-        theory_lits = set(init.solver_literal(atom.literal) for atom in init.theory_atoms)
-        symbolic_lits = set(init.solver_literal(atom.literal) for atom in init.symbolic_atoms)
-        if len(theory_lits & symbolic_lits) == 0:
+        symbolic_lits = {atom: init.solver_literal(atom.literal) for atom in init.symbolic_atoms}
+        theory_lits = {init.solver_literal(atom.literal) for atom in init.theory_atoms}
+
+        overlap = tuple(atom for atom, lit in symbolic_lits.items() if lit in theory_lits)
+
+        if len(overlap) == 0:
             print("There is no solver literal overlap")
         else:
             print("There is overlap")
+            print(*(atom.symbol for atom in overlap))


 control = clingo.Control(["0"])
 control.register_propagator(CheckAtoms())
-control.add("base", [], PROGRAM_A)
+control.add("base", [], PROGRAM)
 control.ground([("base", [])])

 control.solve()
logic/Makefile | 18 (new file)
@@ -0,0 +1,18 @@
+all: grammars
+
+
+antlr4.jar:
+	wget https://www.antlr.org/download/antlr-4.10.1-complete.jar
+	mv antlr-4.10.1-complete.jar antlr4.jar
+	python3 -m pip install antlr4-python3-runtime
+
+.PHONY: grammars
+grammars: grammars/__init__.py
+grammars/__init__.py: O.g4 antlr4.jar
+	java -cp antlr4.jar org.antlr.v4.Tool -Dlanguage=Python3 $< -no-listener -visitor -o grammars
+	touch grammars/__init__.py
+
+.PHONEY: clean
+clean:
+	rm -rf grammars
+	rm -f antlr4.jar
logic/O.g4 | 14 (new file)
@@ -0,0 +1,14 @@
+grammar O;
+
+WS: [ \n\t\r]+ -> skip;
+
+expr:
+	'(' expr ')'			# parenth
+	| '-' expr				# log_neg
+	| expr op = '->' expr	# log_impl
+	| expr op = '<->' expr	# log_iff
+	| expr op = '|' expr	# log_or
+	| expr op = '&' expr	# log_and
+	| ATOM					# atom;
+
+ATOM: [a-z][a-zA-Z0-9]*;
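A quick way to sanity-check the grammar above once `make grammars` has produced the generated package (a sketch, not part of the commit; the input string is arbitrary, and the import path assumes you run from the repository root, as logic/logic.py does):

from antlr4 import InputStream, CommonTokenStream
from logic.grammars.OLexer import OLexer
from logic.grammars.OParser import OParser

# Tokenize and parse a small formula, then print the labeled parse tree.
tokens = CommonTokenStream(OLexer(InputStream("a -> (b & -c)")))
parser = OParser(tokens)
tree = parser.expr()
print(tree.toStringTree(recog=parser))  # prints the nested expr parse tree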
logic/__init__.py | 0 (new, empty file)
logic/__pycache__/__init__.cpython-39.pyc | BIN (new file; binary not shown)
logic/__pycache__/logic.cpython-39.pyc | BIN (new file; binary not shown)
logic/grammars/O.interp | 30 (new file)
(ANTLR-generated interpreter data for grammar O: token literal and symbolic names, the rule name expr, and the serialized parser ATN.)
logic/grammars/O.tokens | 16 (new file)
(ANTLR-generated token table: T__0=1 through T__6=7, WS=8, ATOM=9, plus the literal spellings '(' ')' '-' '->' '<->' '|' '&'.)
logic/grammars/OLexer.interp | 44 (new file)
(ANTLR-generated lexer interpreter data: token names, lexer rule names T__0–T__6/WS/ATOM, channel and mode names, and the serialized lexer ATN.)
logic/grammars/OLexer.py | 70 (new file)
(Lexer generated from O.g4 by ANTLR 4.10.1: class OLexer(Lexer) with the serialized ATN, token types T__0–T__6, WS, ATOM, and the standard literal/symbolic/rule-name tables.)
logic/grammars/OLexer.tokens | 16 (new file)
(Same ANTLR-generated token table as O.tokens, emitted alongside the lexer.)
logic/grammars/OParser.py | 368 (new file)
(Parser generated from O.g4 by ANTLR 4.10.1: class OParser(Parser) with the serialized ATN, an ExprContext plus one labeled context class per alternative — Parenth, Log_neg, Log_impl, Log_iff, Log_or, Log_and, Atom — the left-recursive expr() method driven by precedence predicates, and the sempred/expr_sempred hooks.)
logic/grammars/OVisitor.py | 48 (new file)
(Visitor skeleton generated from O.g4 by ANTLR 4.10.1: class OVisitor(ParseTreeVisitor) with visitLog_neg, visitParenth, visitLog_or, visitLog_impl, visitAtom, visitLog_iff and visitLog_and, each defaulting to visitChildren.)
logic/grammars/__init__.py | 0 (new, empty file)
logic/grammars/__pycache__/OLexer.cpython-39.pyc | BIN (new file; binary not shown)
logic/grammars/__pycache__/OParser.cpython-39.pyc | BIN (new file; binary not shown)
logic/grammars/__pycache__/__init__.cpython-39.pyc | BIN (new file; binary not shown)
								logic/logic.py
									
										
									
									
									
										Normal file
									
								
							
							
						
						
									
										104
									
								
								logic/logic.py
									
										
									
									
									
										Normal file
									
								
@@ -0,0 +1,104 @@
+from abc import abstractmethod
+from dataclasses import dataclass
+from itertools import product
+from typing import Dict, Iterable, Set
+from antlr4 import ParseTreeVisitor, InputStream, CommonTokenStream
+from logic.grammars.OLexer import OLexer
+from logic.grammars.OParser import OParser
+
+
+class Expr:
+    @abstractmethod
+    def eval(self, assignment: Dict[str, bool]):
+        pass
+
+
+@dataclass
+class AtomExpr(Expr):
+    name: str
+
+    def eval(self, assignment: Dict[str, bool]):
+        return assignment[self.name]
+
+@dataclass
+class NotExpr(Expr):
+    expr: Expr
+
+    def eval(self, assignment: Dict[str, bool]):
+        return not self.expr.eval(assignment)
+
+@dataclass
+class BinExpr(Expr):
+    op: str
+    left: Expr
+    right: Expr
+
+    def eval(self, assignment: Dict[str, bool]):
+        if self.op == "|":
+            return self.left.eval(assignment) or self.right.eval(assignment)
+        if self.op == "&":
+            return self.left.eval(assignment) and self.right.eval(assignment)
+        if self.op == "->":
+            return (not self.left.eval(assignment)) or self.right.eval(assignment)
+        if self.op == "<->":
+            return self.left.eval(assignment) == self.right.eval(assignment)
+
+
+@dataclass
+class Formula:
+    alphabet: Set[str]
+    root: Expr
+
+    def models(self) -> Iterable[Dict[str, bool]]:
+        alphabet = tuple(self.alphabet)
+        for bools in product((True, False), repeat=len(alphabet)):
+            assignment = {atom: v for atom, v in zip(alphabet, bools)}
+            if self.root.eval(assignment):
+                yield assignment
+
+
+class FormulaBuilder(ParseTreeVisitor):
+    alphabet: Set[str]
+
+    def __init__(self) -> None:
+        self.alphabet = set()
+
+    def visitParenth(self, ctx: OParser.ParenthContext):
+        return self.visit(ctx.expr())
+
+    def visit_bin_op(self, ctx):
+        return BinExpr(ctx.op.text, self.visit(ctx.expr(0)), self.visit(ctx.expr(1)))
+
+    def visitLog_neg(self, ctx: OParser.Log_negContext):
+        return NotExpr(self.visit(ctx.expr()))
+
+    def visitLog_or(self, ctx: OParser.Log_orContext):
+        return self.visit_bin_op(ctx)
+
+    def visitLog_impl(self, ctx: OParser.Log_implContext):
+        return self.visit_bin_op(ctx)
+
+    def visitLog_iff(self, ctx: OParser.Log_iffContext):
+        return self.visit_bin_op(ctx)
+
+    def visitLog_and(self, ctx: OParser.Log_andContext):
+        return self.visit_bin_op(ctx)
+
+    def visitAtom(self, ctx: OParser.AtomContext):
+        text = ctx.getText()
+        self.alphabet.add(text)
+        return AtomExpr(text)
+
+    def build(self, tree):
+        expr = self.visit(tree)
+        return Formula(self.alphabet, expr)
+
+
+def parse(text: str):
+    input_stream = InputStream(text)
+    lexer = OLexer(input_stream)
+    stream = CommonTokenStream(lexer)
+    parser = OParser(stream)
+    tree = parser.expr()
+    formula = FormulaBuilder().build(tree)
+    return formula
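For reference, a minimal usage sketch of this new module (hypothetical, not part of the commit; it assumes the ANTLR-generated OLexer/OParser accept the `.ont` syntax introduced below and the `from logic import logic` layout used in main()):

    # Hypothetical example: parse an .ont formula and enumerate its models.
    from logic import logic

    formula = logic.parse("(b -> x) & (a -> y)")
    print(sorted(formula.alphabet))  # the atoms collected while visiting the parse tree
    for assignment in formula.models():
        # Each model is a dict mapping every atom in the alphabet to True/False.
        print({atom for atom, value in assignment.items() if value})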
@@ -1,14 +1,14 @@
-from typing import Iterable, Set, Union
+from typing import Dict, Iterable, Set, Union
 
 models = None
 
 DL_ATOMS = "abc"
 
 
-def set_models(alphabet: Iterable[str], v: Iterable[Iterable[str]]):
+def set_models(alphabet: Iterable[str], v: Iterable[Dict[str, bool]]):
     global models, DL_ATOMS
     DL_ATOMS = "".join(alphabet)
-    models = tuple(set(model) for model in v)
+    models = tuple(set(atom for atom, value in model.items() if value) for model in v)
 
 
 def propagate(atoms: Set[str]) -> Set[str]:
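The reworked set_models now receives the assignment dicts produced by Formula.models() and keeps only the atoms that are true in each model, so the rest of the module keeps working with sets of atom names. A tiny illustration (hypothetical values, not from the commit):

    assignment = {"a": True, "b": False}  # one model as yielded by Formula.models()
    true_atoms = {atom for atom, value in assignment.items() if value}
    assert true_atoms == {"a"}            # what ends up stored in `models`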
							
								
								
									
1 programs/a.ont Normal file
@@ -0,0 +1 @@
+((a | -a) & (b | -b))
@@ -1,7 +0,0 @@
-"ab"
-(
-"",
-"a",
-"b",
-"ab",
-)
1 programs/b.ont Normal file
@@ -0,0 +1 @@
+a | b
@@ -1,6 +0,0 @@
-"ab"
-(
-"a",
-"b",
-"ab",
-)
3 programs/c.lp Normal file
@@ -0,0 +1,3 @@
+a, b.
+a :- x.
+b :- y.
1 programs/c.ont Normal file
@@ -0,0 +1 @@
+(b -> x) & (a -> y)
@@ -46,6 +46,11 @@ class OntologyPropagator:
         self.symbolic_atoms_inv = {v: k for k, v in self.symbolic_atoms.items()}
         self.theory_atoms_inv = {v: k for k, v in self.theory_atoms.items()}
 
+
+        # Make false always false
+        false_lit = self.theory_atoms_inv["false"]
+        init.add_clause([-false_lit])
+
         # Might only need to watch just theory atoms / just symbol atoms but for now
         # watching everything is easier
         for lit in chain(self.symbolic_atoms, self.theory_atoms):
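The unit clause `[-false_lit]` forces the solver literal behind the `&o{false}` theory atom to be false in every answer set; the intent appears to be that the dummy `atom :- &o{false}` rules added in add_external_atoms below can never fire and only keep those atoms visible to the grounder.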
@@ -165,7 +170,8 @@ class OntologyPropagator:
         if shrink is None:
             self.conflict(pcontrol)
             return
-        shrink = tuple(self.theory_atoms_inv[atom] for atom in shrink)
+        # The theory atom might not be present if it was removed by clingo for some reason...
+        shrink = tuple(self.theory_atoms_inv[atom] for atom in shrink if atom in self.theory_atoms_inv)
         if any(self.assignment.get(abs(lit)) for lit in shrink):
             self.conflict(pcontrol)
             return
@@ -220,8 +226,11 @@ def add_external_atoms(program: str) -> str:
 """
     # Using .signatures here because symbols is unreliable
     # E.g. a program with the single rule `a :- a.` will not generate a symbol for the atom
+    # Dummy rules of the form `atom :- &o{false}.` must be added for atoms that do not appear
+    # in the head of a rule, as clingo may otherwise decide to unify these with theory atoms.
+    # This seems to be a fix for now.
     external_atoms = "\n".join(
-        f"{atom} :- &o{{{atom}}}."
+        f"{atom} :- &o{{{atom}}}. {atom} :- &o{{false}}."
        for atom in (sig for sig, _, _ in control.symbolic_atoms.signatures)
     )
     return theory_grammar + program + "\n" + external_atoms
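To make the generated dummy rules concrete, this is what the f-string above produces for a hypothetical pair of signatures `a` and `x` (illustration only, not part of the commit):

    # Stand-in names for what control.symbolic_atoms.signatures would yield.
    signatures = ["a", "x"]
    external_atoms = "\n".join(
        f"{atom} :- &o{{{atom}}}. {atom} :- &o{{false}}."
        for atom in signatures
    )
    print(external_atoms)
    # a :- &o{a}. a :- &o{false}.
    # x :- &o{x}. x :- &o{false}.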
@@ -259,15 +268,17 @@ def solve(program: str, O_alphabet: Iterable[str], O_models: Iterable[Iterable[s
 def main():
     from sys import argv
     from os.path import splitext
+    from logic import logic
 
     assert len(argv) == 2, "Please provide an .lp file as an argument"
     lp_filename = argv[1]
-    models_filename = splitext(lp_filename)[0] + ".py"
+    models_filename = splitext(lp_filename)[0] + ".ont"
     with open(lp_filename, "rt", encoding="utf8") as lp_fo:
         with open(models_filename, "rt", encoding="utf8") as models_fo:
-            alphabet = eval(models_fo.readline())
-            models = eval(models_fo.read())
-            solve(lp_fo.read(), alphabet, models)
+            models_text = models_fo.read()
+            formula = logic.parse(models_text)
+            models = tuple(formula.models())
+            solve(lp_fo.read(), formula.alphabet, models)
 
 
 if __name__ == "__main__":
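With this change, each `.lp` program is paired with an `.ont` formula of the same basename: running `python3 propagator.py programs/c.lp` (as tests.py below does) now reads the ontology from `programs/c.ont` and enumerates its models via logic.parse(...).models() instead of eval-ing Python literals from a companion `.py` file.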
							
								
								
									
2 test_output/a.accept Normal file
@@ -0,0 +1,2 @@
+ALL FINISHED, ALL ANSWER SETS:
+b a

2 test_output/a.out Normal file
@@ -0,0 +1,2 @@
+ALL FINISHED, ALL ANSWER SETS:
+b a

2 test_output/b.accept Normal file
@@ -0,0 +1,2 @@
+ALL FINISHED, ALL ANSWER SETS:
+b a

2 test_output/b.out Normal file
@@ -0,0 +1,2 @@
+ALL FINISHED, ALL ANSWER SETS:
+b a

2 test_output/c.accept Normal file
@@ -0,0 +1,2 @@
+ALL FINISHED, ALL ANSWER SETS:
+a b x y

2 test_output/c.out Normal file
@@ -0,0 +1,2 @@
+ALL FINISHED, ALL ANSWER SETS:
+a b x y
							
								
								
									
28 tests.py Normal file → Executable file
@@ -1,10 +1,11 @@
 #!/usr/bin/env python3
 
 from glob import glob
-from os import system
+from os import remove, system
 from functools import partial
+from pathlib import Path
 from sys import argv, stderr
-from os.path import splitext
+from os.path import splitext, basename
 
 
 eprint = partial(print, file=stderr)
@@ -14,7 +15,26 @@ if len(argv) >= 2 and argv[1] == "-a":
 else:
     accept = False
 
-system("mkdir -p tests")
+TEST_FOLDER = "test_output"
+
+system(f"mkdir -p {TEST_FOLDER}")
+
+system(f"rm -rf {TEST_FOLDER}/*.out")
 
 for lp in glob("programs/*.lp"):
-    base = splitext(lp)[0]
+    base = basename(splitext(lp)[0])
+    out_path = Path() / TEST_FOLDER / f"{base}.out"
+    accept_path = Path() / TEST_FOLDER / f"{base}.accept"
+    diff_path = Path() / TEST_FOLDER / f"{base}.diff"
+    system(f"python3 propagator.py {lp} > {out_path}")
+
+    if accept:
+        system(f"cp {out_path} {accept_path}")
+    else:
+        system(f"diff {out_path} {accept_path} > {diff_path}")
+
+
+for diff in glob(f"{TEST_FOLDER}/*.diff"):
+    with open(diff, "rt", encoding="utf8") as diff_fo:
+        if diff_fo.read(1) == "":
+            remove(diff)
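As restructured, `./tests.py -a` records the current solver output as the accepted baseline in test_output/<name>.accept, while a plain `./tests.py` run writes test_output/<name>.out, diffs it against the baseline, and deletes empty .diff files, so any .diff left in test_output/ marks a failing test.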