Yoav Artzi committed 3571658

Code organization + validation learners + bug fixes

Files changed (223)

 **Bibtex:**
 
     @article{artzi2013uwspf,
-        title={UW SPF: The University of Washington Semantic Parsing Framework},
+        title={{UW SPF: The University of Washington Semantic Parsing Framework}},
         author={Artzi, Yoav and Zettlemoyer, Luke},
         year={2013}
     }
 src.tinyutils=tinyutils/src
 src.ccg=ccg/src
+src.ccg.lexicon=ccg.lexicon/src
+src.ccg.lexicon.factored.lambda=ccg.lexicon.factored.lambda/src
 src.data=data/src
 src.data.lexicalgen=data.lexicalgen/src
 src.data.lexicalgen.singlesentence=data.lexicalgen.singlesentence/src
 src.datasinglesentence=data.singlesentence/src
 src.explat=explat/src
 src.learn=learn/src
+src.learn.validation=learn.validation/src
 src.learn.situated=learn.situated/src
 src.learn.weakp=learn.weakp/src
 src.learn.ubl=learn.ubl/src
 src.parser.ccg.factoredlex=parser.ccg.factoredlex/src
 src.parser.ccg.features.basic=parser.ccg.features.basic/src
 src.parser.ccg.features.lambda=parser.ccg.features.lambda/src
-src.parser.ccg.genlex=parser.ccg.genlex/src
 src.parser.ccg.cky.genlex=parser.ccg.cky.genlex/src
 src.parser.ccg.rules=parser.ccg.rules/src
 src.parser.ccg.rules.lambda=parser.ccg.rules.lambda/src
 src.parser.ccg.rules.coordination.lambda = parser.ccg.rules.coordination.lambda/src
 src.learn.simple = learn.simple/src
 src.learn.simple.genlex = learn.simple.genlex/src
+src.storage = storage/src
+src.genlex.ccg = genlex.ccg/src
+src.genlex.ccg.template = genlex.ccg.template/src
 mainclass=edu.uw.cs.lil.tiny.geoquery.Geo880Dev
     </description>
 	<!-- set global properties for this build -->
 	<property file="build.properties" />
-	<property name="version" value="1.1" />
+	<property name="version" value="1.2" />
 	<property name="build" location="build" />
 	<property name="build.src" location="build.src" />
 	<property name="dist" location="dist" />
 
 	<path id="classpath">
 		<fileset dir="${lib}">
-			<include name="**/*.jar" />
+			<include name="*.jar" />
 		</fileset>
 	</path>
 
 		<mkdir dir="${build.src}" />
 		<copy todir="${build.src}" >
 			<fileset dir="${src.tinyutils}" includes="**/*.java" />
+			<fileset dir="${src.storage}" includes="**/*.java" />
 			<fileset dir="${src.ccg}" includes="**/*.java" />
+			<fileset dir="${src.ccg.lexicon}" includes="**/*.java" />
+			<fileset dir="${src.ccg.lexicon.factored.lambda}" includes="**/*.java" />
 			<fileset dir="${src.data}" includes="**/*.java" />
+			<fileset dir="${src.genlex.ccg}" includes="**/*.java" />
+			<fileset dir="${src.genlex.ccg.template}" includes="**/*.java" />
 			<fileset dir="${src.exec}" includes="**/*.java" />
 			<fileset dir="${src.data.lexicalgen}" includes="**/*.java" />
 			<fileset dir="${src.data.lexicalgen.singlesentence}" includes="**/*.java" />
 			<fileset dir="${src.datasinglesentence}" includes="**/*.java" />
 			<fileset dir="${src.explat}" includes="**/*.java" />
 			<fileset dir="${src.learn}" includes="**/*.java" />
+			<fileset dir="${src.learn.validation}" includes="**/*.java" />
 			<fileset dir="${src.learn.weakp}" includes="**/*.java" />
 			<fileset dir="${src.learn.ubl}" includes="**/*.java" />
 			<fileset dir="${src.learn.simple}" includes="**/*.java" />
 			<fileset dir="${src.parser.ccg.factoredlex}" includes="**/*.java" />
 			<fileset dir="${src.parser.ccg.features.basic}" includes="**/*.java" />
 			<fileset dir="${src.parser.ccg.features.lambda}" includes="**/*.java" />
-			<fileset dir="${src.parser.ccg.genlex}" includes="**/*.java" />
 			<fileset dir="${src.parser.ccg.cky.genlex}" includes="**/*.java" />
 			<fileset dir="${src.parser.ccg.rules}" includes="**/*.java" />
 			<fileset dir="${src.parser.ccg.rules.lambda}" includes="**/*.java" />

ccg.lexicon.factored.lambda/.classpath

+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="src" path="src"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/ccg"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/ccg.lexicon"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/storage"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/tinyutils"/>
+	<classpathentry combineaccessrules="false" kind="src" path="/mr.lambda"/>
+	<classpathentry kind="output" path="bin"/>
+</classpath>

ccg.lexicon.factored.lambda/.project

+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>ccg.lexicon.factored.lambda</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.eclipse.jdt.core.javabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+	</natures>
+</projectDescription>

ccg.lexicon.factored.lambda/src/edu/uw/cs/lil/tiny/ccg/lexicon/factored/lambda/FactoredLexicon.java

+/*******************************************************************************
+ * UW SPF - The University of Washington Semantic Parsing Framework
+ * <p>
+ * Copyright (C) 2013 Yoav Artzi
+ * <p>
+ * This program is free software; you can redistribute it and/or modify it under
+ * the terms of the GNU General Public License as published by the Free Software
+ * Foundation; either version 2 of the License, or any later version.
+ * <p>
+ * This program is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+ * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+ * details.
+ * <p>
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc., 51
+ * Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ ******************************************************************************/
+package edu.uw.cs.lil.tiny.ccg.lexicon.factored.lambda;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import edu.uw.cs.lil.tiny.ccg.categories.Category;
+import edu.uw.cs.lil.tiny.ccg.categories.ICategoryServices;
+import edu.uw.cs.lil.tiny.ccg.lexicon.ILexicon;
+import edu.uw.cs.lil.tiny.ccg.lexicon.LexicalEntry;
+import edu.uw.cs.lil.tiny.ccg.lexicon.Lexicon;
+import edu.uw.cs.lil.tiny.mr.lambda.LogicalConstant;
+import edu.uw.cs.lil.tiny.mr.lambda.LogicalExpression;
+import edu.uw.cs.lil.tiny.mr.language.type.Type;
+import edu.uw.cs.lil.tiny.storage.AbstractDecoderIntoFile;
+import edu.uw.cs.lil.tiny.storage.DecoderHelper;
+import edu.uw.cs.lil.tiny.storage.IDecoder;
+import edu.uw.cs.lil.tiny.utils.string.IStringFilter;
+import edu.uw.cs.lil.tiny.utils.string.StubStringFilter;
+import edu.uw.cs.utils.collections.ListUtils;
+import edu.uw.cs.utils.composites.Pair;
+
+public class FactoredLexicon implements ILexicon<LogicalExpression> {
+	public static final String							FACTORING_LEXICAL_ORIGIN	= "factoring";
+	
+	private final String								entriesOrigin;
+	
+	// lexemes are grouped by their strings, for quick indexing
+	private final Map<List<String>, Set<Lexeme>>		lexemes						= new HashMap<List<String>, Set<Lexeme>>();
+	
+	// templates are grouped by the types of their input arguments, for quick
+	// indexing
+	private final Map<List<Type>, Set<LexicalTemplate>>	templates					= new HashMap<List<Type>, Set<LexicalTemplate>>();
+	
+	public FactoredLexicon() {
+		this.entriesOrigin = FACTORING_LEXICAL_ORIGIN;
+	}
+	
+	public FactoredLexicon(Set<Lexeme> inputLexemes,
+			Set<LexicalTemplate> inputTemplates) {
+		this(inputLexemes, inputTemplates, FACTORING_LEXICAL_ORIGIN);
+	}
+	
+	public FactoredLexicon(Set<Lexeme> inputLexemes,
+			Set<LexicalTemplate> inputTemplates, String entriesOrigin) {
+		this.entriesOrigin = entriesOrigin;
+		for (final Lexeme lexeme : inputLexemes) {
+			addLexeme(lexeme);
+		}
+		for (final LexicalTemplate template : inputTemplates) {
+			addTemplate(template);
+		}
+		
+	}
+	
+	public static FactoredLexicalEntry factor(
+			LexicalEntry<LogicalExpression> entry) {
+		if (entry instanceof FactoredLexicalEntry) {
+			// Case already a factored lexical entry, cast and return
+			return (FactoredLexicalEntry) entry;
+		} else {
+			// we need to compute the maximal factoring and return it
+			return factor(entry, true, false, 0).get(0);
+		}
+	}
+	
+	public static List<FactoredLexicalEntry> factor(
+			final LexicalEntry<LogicalExpression> entry, boolean doMaximal,
+			boolean doPartial, int maxConstantsInPartial) {
+		
+		final List<Pair<List<LogicalConstant>, LexicalTemplate>> factoring = LexicalTemplate
+				.doFactoring(entry.getCategory(), doMaximal, doPartial,
+						maxConstantsInPartial, entry.getOrigin());
+		
+		return ListUtils
+				.map(factoring,
+						new ListUtils.Mapper<Pair<List<LogicalConstant>, LexicalTemplate>, FactoredLexicalEntry>() {
+							
+							@Override
+							public FactoredLexicalEntry process(
+									Pair<List<LogicalConstant>, LexicalTemplate> obj) {
+								return new FactoredLexicalEntry(entry
+										.getTokens(), entry.getCategory(),
+										new Lexeme(entry.getTokens(), obj
+												.first(), entry.getOrigin()),
+										obj.second(), entry.getOrigin());
+							}
+						});
+	}
+	
+	public static IDecoder<FactoredLexicon> getDecoder(
+			DecoderHelper<LogicalExpression> decoderHelper) {
+		return new Decoder(decoderHelper);
+	}
+	
+	@Override
+	public boolean add(LexicalEntry<LogicalExpression> entry) {
+		final FactoredLexicalEntry factoredEntry = factor(entry);
+		boolean added = false;
+		added |= addLexeme(factoredEntry.getLexeme());
+		added |= addTemplate(factoredEntry.getTemplate());
+		return added;
+	}
+	
+	@Override
+	public boolean addAll(Collection<LexicalEntry<LogicalExpression>> entries) {
+		for (final LexicalEntry<LogicalExpression> lex : entries) {
+			add(lex);
+		}
+		return true;
+	}
+	
+	@Override
+	public boolean addAll(ILexicon<LogicalExpression> lexicon) {
+		if (lexicon instanceof FactoredLexicon) {
+			final FactoredLexicon flex = (FactoredLexicon) lexicon;
+			lexemes.putAll(flex.lexemes);
+			templates.putAll(flex.templates);
+			return true;
+		}
+		return addAll(lexicon.toCollection());
+	}
+	
+	public void addEntriesFromFile(File file,
+			ICategoryServices<LogicalExpression> categoryServices, String origin) {
+		addEntriesFromFile(file, new StubStringFilter(), categoryServices,
+				origin);
+	}
+	
+	/**
+	 * Read entries from a file, one per line, of the form
+	 * 
+	 * <pre>
+	 *  Tokens  :-  Cat
+	 * </pre>
+	 */
+	@Override
+	public void addEntriesFromFile(File file, IStringFilter textFilter,
+			ICategoryServices<LogicalExpression> categoryServices, String origin) {
+		try {
+			final BufferedReader in = new BufferedReader(new FileReader(file));
+			int lineCounter = 0;
+			try {
+				String line;
+				// For each line in the file
+				while ((line = in.readLine()) != null) {
+					++lineCounter;
+					line = line.trim();
+					// Ignore blank lines and comments
+					if (!line.equals("") && !line.startsWith("//")) {
+						add(LexicalEntry.parse(line, textFilter,
+								categoryServices, origin));
+					}
+				}
+			} catch (final RuntimeException e) {
+				throw new RuntimeException(String.format(
+						"Reading of input file %s failed at line %d",
+						file.getName(), lineCounter), e);
+			} finally {
+				in.close();
+			}
+		} catch (final IOException e) {
+			throw new RuntimeException(e);
+		}
+	}
+	
+	@Override
+	public boolean contains(LexicalEntry<LogicalExpression> entry) {
+		final FactoredLexicalEntry factoring = factor(entry);
+		final Set<Lexeme> lexemeSet = lexemes.get(factoring.getLexeme()
+				.getTokens());
+		final Set<LexicalTemplate> templateSet = templates.get(factoring
+				.getTemplate().getTypeSignature());
+		
+		return lexemeSet != null && templateSet != null
+				&& lexemeSet.contains(factoring.getLexeme())
+				&& templateSet.contains(factoring.getTemplate());
+	}
+	
+	@Override
+	public FactoredLexicon copy() {
+		final FactoredLexicon newLexicon = new FactoredLexicon();
+		for (final Map.Entry<List<String>, Set<Lexeme>> lexemeIndex : lexemes
+				.entrySet()) {
+			newLexicon.lexemes.put(lexemeIndex.getKey(), new HashSet<Lexeme>(
+					lexemeIndex.getValue()));
+		}
+		for (final Map.Entry<List<Type>, Set<LexicalTemplate>> templateIndex : templates
+				.entrySet()) {
+			newLexicon.templates.put(templateIndex.getKey(),
+					new HashSet<LexicalTemplate>(templateIndex.getValue()));
+		}
+		
+		return newLexicon;
+	}
+	
+	@Override
+	public List<FactoredLexicalEntry> getLexEntries(List<String> tokens) {
+		final List<FactoredLexicalEntry> newLexicalEntries = new LinkedList<FactoredLexicalEntry>();
+		final Set<Lexeme> lexemeSet = lexemes.get(tokens);
+		if (lexemeSet == null) {
+			return Collections.emptyList();
+		}
+		for (final Lexeme lexeme : lexemeSet) {
+			final Set<LexicalTemplate> temps = templates.get(lexeme
+					.getTypeSignature());
+			for (final LexicalTemplate template : temps) {
+				final FactoredLexicalEntry lex = applyTemplate(template, lexeme);
+				if (lex != null) {
+					newLexicalEntries.add(lex);
+				}
+			}
+		}
+		return newLexicalEntries;
+	}
+	
+	@Override
+	public boolean retainAll(
+			Collection<LexicalEntry<LogicalExpression>> toKeepEntries) {
+		final FactoredLexicon factoredLexicon = new FactoredLexicon();
+		factoredLexicon.addAll(toKeepEntries);
+		return retainAll(factoredLexicon);
+	}
+	
+	@Override
+	public boolean retainAll(ILexicon<LogicalExpression> lexicon) {
+		if (lexicon instanceof FactoredLexicon) {
+			// Case factored lexicon, so should remove all lexemes and templates
+			// it doesn't include
+			final FactoredLexicon factoredLexicon = (FactoredLexicon) lexicon;
+			boolean somethingRemoved = false;
+			
+			// Remove lexemes
+			final Iterator<Entry<List<String>, Set<Lexeme>>> lexemeIterator = lexemes
+					.entrySet().iterator();
+			while (lexemeIterator.hasNext()) {
+				final Entry<List<String>, Set<Lexeme>> lexemeEntry = lexemeIterator
+						.next();
+				if (factoredLexicon.lexemes.containsKey(lexemeEntry.getKey())) {
+					// Case string seq. known, remove lexemes not present
+					somethingRemoved |= lexemeEntry.getValue().retainAll(
+							factoredLexicon.lexemes.get(lexemeEntry.getKey()));
+				} else {
+					// Case this string sequence is not present, remove all its
+					// lexemes
+					lexemeIterator.remove();
+					somethingRemoved = true;
+				}
+			}
+			
+			// Remove templates
+			final Iterator<Entry<List<Type>, Set<LexicalTemplate>>> templateIterator = templates
+					.entrySet().iterator();
+			while (templateIterator.hasNext()) {
+				final Entry<List<Type>, Set<LexicalTemplate>> templateEntry = templateIterator
+						.next();
+				if (factoredLexicon.templates.containsKey(templateEntry
+						.getKey())) {
+					// Case type signature present, remove all templates not
+					// present
+					somethingRemoved |= templateEntry.getValue().retainAll(
+							factoredLexicon.templates.get(templateEntry
+									.getKey()));
+				} else {
+					// Case type signature not present, remove all templates
+					templateIterator.remove();
+					somethingRemoved = true;
+				}
+			}
+			
+			return somethingRemoved;
+		} else {
+			return retainAll(lexicon.toCollection());
+		}
+	}
+	
+	@Override
+	public int size() {
+		int size = 0;
+		for (final Set<Lexeme> lexemeSet : lexemes.values()) {
+			for (final Lexeme lexeme : lexemeSet) {
+				size += templates.get(lexeme.getTypeSignature()).size();
+			}
+		}
+		return size;
+	}
+	
+	/**
+	 * WARNING: this is very inefficient, since it materializes every
+	 * lexeme-template pairing. Avoid calling it when the lexicon is large.
+	 * 
+	 * @return all lexical entries obtained by pairing each stored lexeme with
+	 *         its compatible templates
+	 */
+	@Override
+	public Collection<LexicalEntry<LogicalExpression>> toCollection() {
+		final Set<LexicalEntry<LogicalExpression>> result = new HashSet<LexicalEntry<LogicalExpression>>();
+		for (final Set<Lexeme> lexemeSet : lexemes.values()) {
+			for (final Lexeme lexeme : lexemeSet) {
+				if (templates.containsKey(lexeme.getTypeSignature())) {
+					for (final LexicalTemplate template : templates.get(lexeme
+							.getTypeSignature())) {
+						final LexicalEntry<LogicalExpression> newLex = applyTemplate(
+								template, lexeme);
+						if (newLex != null) {
+							result.add(newLex);
+						}
+					}
+				}
+			}
+		}
+		return result;
+	}
+	
+	@Override
+	public String toString() {
+		
+		final StringBuilder ret = new StringBuilder();
+		ret.append("Lexemes:\n");
+		for (final Entry<List<String>, Set<Lexeme>> entry : lexemes.entrySet()) {
+			ret.append(entry.getKey());
+			ret.append("=");
+			ret.append(entry.getValue());
+			ret.append("\n");
+		}
+		ret.append("Templates:\n");
+		for (final Entry<List<Type>, Set<LexicalTemplate>> entry : templates
+				.entrySet()) {
+			ret.append(entry.getValue());
+			ret.append("\n");
+		}
+		return ret.toString();
+	}
+	
+	private boolean addLexeme(Lexeme lexeme) {
+		Set<Lexeme> lexemeSet = lexemes.get(lexeme.getTokens());
+		if (lexemeSet != null) {
+			return lexemeSet.add(lexeme);
+		} else {
+			lexemeSet = new HashSet<Lexeme>();
+			lexemeSet.add(lexeme);
+			lexemes.put(lexeme.getTokens(), lexemeSet);
+			return true;
+		}
+	}
+	
+	private boolean addTemplate(LexicalTemplate template) {
+		Set<LexicalTemplate> templateSet = templates.get(template
+				.getTypeSignature());
+		if (templateSet != null) {
+			return templateSet.add(template);
+		} else {
+			templateSet = new HashSet<LexicalTemplate>();
+			templateSet.add(template);
+			templates.put(template.getTypeSignature(), templateSet);
+			return true;
+		}
+	}
+	
+	private FactoredLexicalEntry applyTemplate(LexicalTemplate template,
+			Lexeme lexeme) {
+		final Category<LogicalExpression> newCategory = template
+				.makeCategory(lexeme);
+		if (newCategory == null) {
+			return null;
+		}
+		return new FactoredLexicalEntry(lexeme.getTokens(), newCategory,
+				lexeme, template, entriesOrigin);
+	}
+	
+	public static class FactoredLexicalEntry extends
+			LexicalEntry<LogicalExpression> {
+		
+		private final Lexeme			lexeme;
+		private final LexicalTemplate	template;
+		
+		private FactoredLexicalEntry(List<String> tokens,
+				Category<LogicalExpression> category, Lexeme lexeme,
+				LexicalTemplate template, String origin) {
+			super(tokens, category, origin);
+			this.lexeme = lexeme;
+			this.template = template;
+		}
+		
+		@Override
+		public LexicalEntry<LogicalExpression> cloneWithDifferentOrigin(
+				String newOrigin) {
+			return new FactoredLexicalEntry(super.getTokens(),
+					super.getCategory(), lexeme, template, newOrigin);
+		}
+		
+		@Override
+		public boolean equals(Object obj) {
+			if (this == obj) {
+				return true;
+			}
+			if (!super.equals(obj)) {
+				return false;
+			}
+			if (getClass() != obj.getClass()) {
+				return false;
+			}
+			final FactoredLexicalEntry other = (FactoredLexicalEntry) obj;
+			if (lexeme == null) {
+				if (other.lexeme != null) {
+					return false;
+				}
+			} else if (!lexeme.equals(other.lexeme)) {
+				return false;
+			}
+			if (template == null) {
+				if (other.template != null) {
+					return false;
+				}
+			} else if (!template.equals(other.template)) {
+				return false;
+			}
+			return true;
+		}
+		
+		public Lexeme getLexeme() {
+			return lexeme;
+		}
+		
+		public LexicalTemplate getTemplate() {
+			return template;
+		}
+		
+		@Override
+		public int hashCode() {
+			final int prime = 31;
+			int result = super.hashCode();
+			result = prime * result
+					+ ((lexeme == null) ? 0 : lexeme.hashCode());
+			result = prime * result
+					+ ((template == null) ? 0 : template.hashCode());
+			return result;
+		}
+		
+	}
+	
+	private static class Decoder extends
+			AbstractDecoderIntoFile<FactoredLexicon> {
+		
+		private static final int						VERSION	= 1;
+		
+		private final DecoderHelper<LogicalExpression>	decoderHelper;
+		
+		public Decoder(DecoderHelper<LogicalExpression> decoderHelper) {
+			super(FactoredLexicon.class);
+			this.decoderHelper = decoderHelper;
+		}
+		
+		@Override
+		public int getVersion() {
+			return VERSION;
+		}
+		
+		@Override
+		protected Map<String, String> createAttributesMap(FactoredLexicon object) {
+			// No special attributes
+			return new HashMap<String, String>();
+		}
+		
+		@Override
+		protected FactoredLexicon doDecode(Map<String, String> attributes,
+				Map<String, File> dependentFiles, BufferedReader reader)
+				throws IOException {
+			String line;
+			// First, read the Lexemes, one per line
+			final Set<Lexeme> lexemes = new HashSet<Lexeme>();
+			// Read the header of the map
+			readTextLine(reader);
+			while (!(line = readTextLine(reader)).equals("LEXEMES_END")) {
+				lexemes.add(Lexeme.parse(line,
+						decoderHelper.getCategoryServices(),
+						Lexicon.SAVED_LEXICON_ORIGIN));
+			}
+			// Second, read the lexical templates, one per line
+			final Set<LexicalTemplate> templates = new HashSet<LexicalTemplate>();
+			// Read the header of the map
+			readTextLine(reader);
+			while (!(line = readTextLine(reader)).equals("TEMPLATES_END")) {
+				templates.add(LexicalTemplate.parse(line,
+						decoderHelper.getCategoryServices(),
+						Lexicon.SAVED_LEXICON_ORIGIN));
+			}
+			return new FactoredLexicon(lexemes, templates);
+		}
+		
+		@Override
+		protected void doEncode(FactoredLexicon object, BufferedWriter writer)
+				throws IOException {
+			// First, write all of the lexemes
+			writer.write("LEXEMES_START\n");
+			for (final Set<Lexeme> lexemeSet : object.lexemes.values()) {
+				for (final Lexeme lexeme : lexemeSet) {
+					writer.write(lexeme.toString());
+					writer.write("\n");
+				}
+			}
+			writer.write("LEXEMES_END\n");
+			
+			// Next, write all of the templates
+			writer.write("TEMPLATES_START\n");
+			for (final Set<LexicalTemplate> templateSet : object.templates
+					.values()) {
+				for (final LexicalTemplate template : templateSet) {
+					writer.write(template.toString());
+					writer.write("\n");
+				}
+			}
+			writer.write("TEMPLATES_END\n");
+		}
+		
+		@Override
+		protected Map<String, File> encodeDependentFiles(
+				FactoredLexicon object, File directory, File parentFile)
+				throws IOException {
+			// No dependent files
+			return new HashMap<String, File>();
+		}
+		
+	}
+}
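
FactoredLexicon splits each lexical entry into a lexeme (the tokens plus their specific logical constants) and a template (the category with those constants abstracted away), and indexes the two parts separately, so entries that share a template are stored compactly. A minimal usage sketch against the API added in this file, assuming an existing LexicalEntry<LogicalExpression> named entry (a placeholder, not defined here) and the usual java.util imports:

    // Factor an entry into its lexeme and template (maximal factoring).
    final FactoredLexicalEntry factored = FactoredLexicon.factor(entry);
    final Lexeme lexeme = factored.getLexeme();               // tokens + constants
    final LexicalTemplate template = factored.getTemplate();  // abstracted category
    // Store the two parts and let the lexicon re-combine them on lookup.
    final FactoredLexicon lexicon = new FactoredLexicon(
            Collections.singleton(lexeme), Collections.singleton(template));
    final List<FactoredLexicalEntry> entries = lexicon.getLexEntries(entry.getTokens());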

ccg.lexicon.factored.lambda/src/edu/uw/cs/lil/tiny/ccg/lexicon/factored/lambda/FactoredLexiconServices.java

+/*******************************************************************************
+ * UW SPF - The University of Washington Semantic Parsing Framework
+ * <p>
+ * Copyright (C) 2013 Yoav Artzi
+ * <p>
+ * This program is free software; you can redistribute it and/or modify it under
+ * the terms of the GNU General Public License as published by the Free Software
+ * Foundation; either version 2 of the License, or any later version.
+ * <p>
+ * This program is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+ * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+ * details.
+ * <p>
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc., 51
+ * Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ ******************************************************************************/
+package edu.uw.cs.lil.tiny.ccg.lexicon.factored.lambda;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import edu.uw.cs.lil.tiny.mr.lambda.LogicLanguageServices;
+import edu.uw.cs.lil.tiny.mr.lambda.LogicalConstant;
+
+public class FactoredLexiconServices {
+	private static FactoredLexiconServices	INSTANCE			= new FactoredLexiconServices();
+	
+	private final Set<LogicalConstant>		unfactoredConstants	= new HashSet<LogicalConstant>();
+	
+	private FactoredLexiconServices() {
+	}
+	
+	public static boolean isFactorable(LogicalConstant constant) {
+		return INSTANCE.doIsFactorable(constant);
+	}
+	
+	public static void set(Set<LogicalConstant> unfactoredConstants) {
+		INSTANCE = new FactoredLexiconServices();
+		INSTANCE.addUnfactoredConstants(unfactoredConstants);
+	}
+	
+	private void addUnfactoredConstants(Set<LogicalConstant> constants) {
+		unfactoredConstants.addAll(constants);
+	}
+	
+	private boolean doIsFactorable(LogicalConstant constant) {
+		return !LogicLanguageServices.isCoordinationPredicate(constant)
+				&& !LogicLanguageServices.isArrayIndexPredicate(constant)
+				&& !LogicLanguageServices.isArraySubPredicate(constant)
+				&& !LogicLanguageServices.getTypeRepository().getIndexType()
+						.equals(constant.getType())
+				&& !unfactoredConstants.contains(constant);
+	}
+	
+}
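
FactoredLexiconServices keeps a global set of constants that are never abstracted into template placeholders: coordination predicates, array index/sub predicates, and index-typed constants are always excluded, and set(...) pins additional, experiment-specific constants on top of that. A short sketch, where specialConstant stands in for a LogicalConstant obtained elsewhere (e.g. via the configured category services):

    // Replace the current service instance and pin 'specialConstant' as unfactorable.
    final Set<LogicalConstant> pinned = new HashSet<LogicalConstant>();
    pinned.add(specialConstant);
    FactoredLexiconServices.set(pinned);
    // Factoring now leaves this constant in place inside templates.
    assert !FactoredLexiconServices.isFactorable(specialConstant);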

ccg.lexicon.factored.lambda/src/edu/uw/cs/lil/tiny/ccg/lexicon/factored/lambda/Lexeme.java

+/*******************************************************************************
+ * UW SPF - The University of Washington Semantic Parsing Framework
+ * <p>
+ * Copyright (C) 2013 Yoav Artzi
+ * <p>
+ * This program is free software; you can redistribute it and/or modify it under
+ * the terms of the GNU General Public License as published by the Free Software
+ * Foundation; either version 2 of the License, or any later version.
+ * <p>
+ * This program is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+ * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+ * details.
+ * <p>
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc., 51
+ * Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ ******************************************************************************/
+package edu.uw.cs.lil.tiny.ccg.lexicon.factored.lambda;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+
+import edu.uw.cs.lil.tiny.ccg.categories.ICategoryServices;
+import edu.uw.cs.lil.tiny.mr.lambda.LogicLanguageServices;
+import edu.uw.cs.lil.tiny.mr.lambda.LogicalConstant;
+import edu.uw.cs.lil.tiny.mr.lambda.LogicalExpression;
+import edu.uw.cs.lil.tiny.mr.lambda.LogicalExpressionRuntimeException;
+import edu.uw.cs.lil.tiny.mr.language.type.Type;
+
+public class Lexeme {
+	private final List<LogicalConstant>	constants;
+	private final String				origin;
+	private final List<String>			tokens;
+	private final List<Type>			typeSignature;
+	
+	public Lexeme(List<String> tokens, List<LogicalConstant> constants,
+			String origin) {
+		this.origin = origin;
+		this.constants = Collections.unmodifiableList(constants);
+		this.tokens = Collections.unmodifiableList(tokens);
+		this.typeSignature = Collections
+				.unmodifiableList(getSignature(constants));
+	}
+	
+	public static List<Type> getSignature(List<LogicalConstant> constants) {
+		final List<Type> types = new ArrayList<Type>(constants.size());
+		for (final LogicalConstant constant : constants) {
+			types.add(LogicLanguageServices.getTypeRepository().generalizeType(
+					constant.getType()));
+		}
+		return types;
+	}
+	
+	/**
+	 * Given a string, parse a lexeme from it.
+	 * 
+	 * @param line
+	 *            lexeme in the bracketed form produced by {@link #toString()},
+	 *            i.e. [tokens]=[constants]
+	 * @return the parsed {@link Lexeme}
+	 */
+	public static Lexeme parse(String line,
+			ICategoryServices<LogicalExpression> categoryServices, String origin) {
+		
+		final int equalsIndex = line.indexOf("=");
+		final String tokensString = line.substring(1, equalsIndex - 1);
+		final String constantsString = line.substring(equalsIndex + 2,
+				line.length() - 1);
+		
+		final List<String> tokens = new LinkedList<String>();
+		for (final String token : tokensString.split(", ")) {
+			tokens.add(token);
+		}
+		
+		final List<LogicalConstant> constants = new LinkedList<LogicalConstant>();
+		if (!constantsString.equals("")) {
+			for (final String constant : constantsString.split(", ")) {
+				
+				final LogicalExpression exp = categoryServices
+						.parseSemantics(constant);
+				if (!(exp instanceof LogicalConstant)) {
+					throw new LogicalExpressionRuntimeException(
+							"Not a constant error: " + constant);
+				}
+				
+				constants.add((LogicalConstant) exp);
+			}
+		}
+		return new Lexeme(tokens, constants, origin);
+	}
+	
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj) {
+			return true;
+		}
+		if (obj == null) {
+			return false;
+		}
+		if (!(obj instanceof Lexeme)) {
+			return false;
+		}
+		final Lexeme other = (Lexeme) obj;
+		if (constants == null) {
+			if (other.constants != null) {
+				return false;
+			}
+		} else if (!constants.equals(other.constants)) {
+			return false;
+		}
+		if (tokens == null) {
+			if (other.tokens != null) {
+				return false;
+			}
+		} else if (!tokens.equals(other.tokens)) {
+			return false;
+		}
+		return true;
+	}
+	
+	public List<LogicalConstant> getConstants() {
+		return constants;
+	}
+	
+	public String getOrigin() {
+		return origin;
+	}
+	
+	public List<String> getTokens() {
+		return tokens;
+	}
+	
+	public List<Type> getTypeSignature() {
+		return typeSignature;
+	}
+	
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = 1;
+		result = prime * result
+				+ ((constants == null) ? 0 : constants.hashCode());
+		result = prime * result + ((tokens == null) ? 0 : tokens.hashCode());
+		return result;
+	}
+	
+	public boolean matches(List<String> inputTokens) {
+		return tokens.equals(inputTokens);
+	}
+	
+	public int numConstants() {
+		return constants.size();
+	}
+	
+	@Override
+	public String toString() {
+		return tokens + "=" + constants;
+	}
+	
+}
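
A Lexeme serializes through toString() to a bracketed tokens=constants form, and Lexeme.parse expects that same form back. A small round-trip sketch, assuming a configured ICategoryServices<LogicalExpression> named categoryServices; the token and constant names below are illustrative only:

    // Serialized form: [token1, token2]=[constant1, constant2]
    final String line = "[new, york]=[ny:s]";  // hypothetical constant ny:s
    final Lexeme lexeme = Lexeme.parse(line, categoryServices, "example-origin");
    // lexeme.getTokens() -> [new, york]; getTypeSignature() holds the generalized
    // type of each constant; toString() reproduces the bracketed form.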

ccg.lexicon.factored.lambda/src/edu/uw/cs/lil/tiny/ccg/lexicon/factored/lambda/LexicalTemplate.java

+/*******************************************************************************
+ * UW SPF - The University of Washington Semantic Parsing Framework
+ * <p>
+ * Copyright (C) 2013 Yoav Artzi
+ * <p>
+ * This program is free software; you can redistribute it and/or modify it under
+ * the terms of the GNU General Public License as published by the Free Software
+ * Foundation; either version 2 of the License, or any later version.
+ * <p>
+ * This program is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+ * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+ * details.
+ * <p>
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc., 51
+ * Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ ******************************************************************************/
+package edu.uw.cs.lil.tiny.ccg.lexicon.factored.lambda;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Map;
+import java.util.Set;
+
+import edu.uw.cs.lil.tiny.ccg.categories.Category;
+import edu.uw.cs.lil.tiny.ccg.categories.ICategoryServices;
+import edu.uw.cs.lil.tiny.ccg.categories.syntax.Syntax;
+import edu.uw.cs.lil.tiny.mr.lambda.Lambda;
+import edu.uw.cs.lil.tiny.mr.lambda.Literal;
+import edu.uw.cs.lil.tiny.mr.lambda.LogicLanguageServices;
+import edu.uw.cs.lil.tiny.mr.lambda.LogicalConstant;
+import edu.uw.cs.lil.tiny.mr.lambda.LogicalExpression;
+import edu.uw.cs.lil.tiny.mr.lambda.Variable;
+import edu.uw.cs.lil.tiny.mr.lambda.visitor.ILogicalExpressionVisitor;
+import edu.uw.cs.lil.tiny.mr.lambda.visitor.IsWellTyped;
+import edu.uw.cs.lil.tiny.mr.lambda.visitor.ReplaceExpression;
+import edu.uw.cs.lil.tiny.mr.language.type.Type;
+import edu.uw.cs.utils.collections.CollectionUtils;
+import edu.uw.cs.utils.collections.ListUtils;
+import edu.uw.cs.utils.composites.Pair;
+import edu.uw.cs.utils.counter.Counter;
+
+public class LexicalTemplate {
+	
+	private final List<LogicalConstant>			arguments;
+	
+	private final String						origin;
+	
+	private final Category<LogicalExpression>	template;
+	
+	private final List<Type>					typeSignature;
+	
+	/**
+	 * Create a template by abstracting all of the given constants. NOTE: we
+	 * assume that every constant in the list appears somewhere in the logical
+	 * expression of the category.
+	 * 
+	 * @param constants
+	 * @param template
+	 */
+	private LexicalTemplate(List<LogicalConstant> constants,
+			Category<LogicalExpression> template, String origin) {
+		
+		this.origin = origin;
+		this.arguments = Collections.unmodifiableList(constants);
+		this.template = template;
+		final List<Type> types = new ArrayList<Type>(constants.size());
+		for (final LogicalConstant constant : constants) {
+			types.add(constant.getType());
+		}
+		this.typeSignature = Collections.unmodifiableList(types);
+		
+	}
+	
+	public static List<Pair<List<LogicalConstant>, LexicalTemplate>> doFactoring(
+			final Category<LogicalExpression> inputCategory, boolean doMaximal,
+			boolean doPartial, int maxConstantsInPartial, final String origin) {
+		final Set<Pair<AbstractConstants.Placeholders, ? extends LogicalExpression>> factoring = AbstractConstants
+				.of(inputCategory.getSem(), doMaximal, doPartial,
+						maxConstantsInPartial);
+		return ListUtils
+				.map(factoring,
+						new ListUtils.Mapper<Pair<AbstractConstants.Placeholders, ? extends LogicalExpression>, Pair<List<LogicalConstant>, LexicalTemplate>>() {
+							
+							@Override
+							public Pair<List<LogicalConstant>, LexicalTemplate> process(
+									Pair<AbstractConstants.Placeholders, ? extends LogicalExpression> obj) {
+								return Pair.of(
+										Collections.unmodifiableList(obj
+												.first().originals),
+										new LexicalTemplate(
+												Collections.unmodifiableList(obj
+														.first().placeholders),
+												inputCategory
+														.cloneWithNewSemantics(obj
+																.second()),
+												origin));
+							}
+						});
+	}
+	
+	public static Pair<List<LogicalConstant>, LexicalTemplate> doFactoring(
+			Category<LogicalExpression> inputCategory, String origin) {
+		return doFactoring(inputCategory, true, false, 0, origin).get(0);
+	}
+	
+	/**
+	 * Given a string, parse a lexical template from it.
+	 * 
+	 * @param line
+	 *            template in the form produced by {@link #toString()}, i.e.
+	 *            [constants]-->category
+	 * @return the parsed template, or null if any abstracted constant fails to
+	 *         parse as a logical constant
+	 */
+	public static LexicalTemplate parse(String line,
+			ICategoryServices<LogicalExpression> categoryServices, String origin) {
+		final int index = line.indexOf("-->");
+		final String constantsString = line.substring(1, index - 1);
+		final List<LogicalConstant> constants = new LinkedList<LogicalConstant>();
+		if (!constantsString.equals("")) {
+			for (final String constant : constantsString.split(", ")) {
+				final LogicalExpression exp = categoryServices
+						.parseSemantics(constant);
+				if (!(exp instanceof LogicalConstant)) {
+					return null;
+				}
+				constants.add((LogicalConstant) exp);
+			}
+		}
+		
+		final String categoryString = line.substring(index + 3, line.length());
+		
+		return new LexicalTemplate(constants,
+				categoryServices.parse(categoryString), origin);
+	}
+	
+	public LexicalTemplate cloneWithNewSyntax(Syntax syntax) {
+		return new LexicalTemplate(arguments, Category.create(syntax,
+				template.getSem()), origin);
+	}
+	
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj) {
+			return true;
+		}
+		if (obj == null) {
+			return false;
+		}
+		if (!(obj instanceof LexicalTemplate)) {
+			return false;
+		}
+		final LexicalTemplate other = (LexicalTemplate) obj;
+		if (arguments == null) {
+			if (other.arguments != null) {
+				return false;
+			}
+		} else if (!arguments.equals(other.arguments)) {
+			return false;
+		}
+		if (template == null) {
+			if (other.template != null) {
+				return false;
+			}
+		} else if (!template.equals(other.template)) {
+			return false;
+		}
+		return true;
+	}
+	
+	public String getOrigin() {
+		return origin;
+	}
+	
+	public Category<LogicalExpression> getTemplateCategory() {
+		return template;
+	}
+	
+	public List<Type> getTypeSignature() {
+		return typeSignature;
+	}
+	
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = 1;
+		result = prime * result
+				+ ((arguments == null) ? 0 : arguments.hashCode());
+		result = prime * result
+				+ ((template == null) ? 0 : template.hashCode());
+		return result;
+	}
+	
+	public Category<LogicalExpression> makeCategory(Lexeme lexeme) {
+		if (arguments.size() != lexeme.numConstants()) {
+			return null;
+		}
+		LogicalExpression newSemantics = template.getSem();
+		int i = 0;
+		for (final LogicalConstant constant : lexeme.getConstants()) {
+			final LogicalConstant variable = arguments.get(i);
+			newSemantics = ReplaceExpression.of(newSemantics, variable,
+					constant);
+			i++;
+		}
+		if (!IsWellTyped.of(newSemantics)) {
+			return null;
+		}
+		
+		return template.cloneWithNewSemantics(newSemantics);
+	}
+	
+	@Override
+	public String toString() {
+		return arguments + "-->" + template;
+	}
+	
+	public static class AbstractConstants implements ILogicalExpressionVisitor {
+		private final Map<Type, Counter>								counters	= new HashMap<Type, Counter>();
+		private final boolean											doMaximal;
+		private final boolean											doPartial;
+		private final int												partialMaxConstants;
+		private List<Pair<Placeholders, ? extends LogicalExpression>>	tempReturn	= null;
+		
+		private AbstractConstants(boolean doMaximal, boolean doPartial,
+				int partialMaxConstants) {
+			// Usage only through static 'of' method
+			this.doMaximal = doMaximal;
+			this.doPartial = doPartial;
+			this.partialMaxConstants = partialMaxConstants;
+		}
+		
+		public static Set<Pair<Placeholders, ? extends LogicalExpression>> of(
+				LogicalExpression exp, boolean getMaximal, boolean getPartial,
+				int partialMaxConstants) {
+			final AbstractConstants visitor = new AbstractConstants(getMaximal,
+					getPartial, partialMaxConstants);
+			visitor.visit(exp);
+			
+			// Remove any empty factoring, unless it's a maximal one
+			final Iterator<Pair<Placeholders, ? extends LogicalExpression>> iterator = visitor.tempReturn
+					.iterator();
+			while (iterator.hasNext()) {
+				final Pair<Placeholders, ? extends LogicalExpression> pair = iterator
+						.next();
+				if (!pair.first().isMaximal() && pair.first().size() == 0) {
+					iterator.remove();
+				}
+			}
+			
+			return new HashSet<Pair<Placeholders, ? extends LogicalExpression>>(
+					visitor.tempReturn);
+		}
+		
+		private static Pair<Placeholders, ? extends LogicalExpression> getAndRemoveMaximal(
+				List<Pair<Placeholders, ? extends LogicalExpression>> pairs) {
+			Pair<Placeholders, ? extends LogicalExpression> maximal = null;
+			final Iterator<Pair<Placeholders, ? extends LogicalExpression>> iterator = pairs
+					.iterator();
+			while (iterator.hasNext()) {
+				final Pair<Placeholders, ? extends LogicalExpression> pair = iterator
+						.next();
+				if (pair.first().isMaximal()) {
+					if (maximal == null) {
+						maximal = pair;
+						iterator.remove();
+					} else {
+						throw new IllegalStateException(
+								"found more than one maximal");
+					}
+				}
+			}
+			
+			if (maximal == null) {
+				throw new IllegalStateException(
+						"expected a maximal pair, not found");
+			}
+			
+			return maximal;
+		}
+		
+		@Override
+		public void visit(Lambda lambda) {
+			// not visiting argument, since we are only abstracting constants.
+			lambda.getBody().accept(this);
+			final ListIterator<Pair<Placeholders, ? extends LogicalExpression>> iterator = tempReturn
+					.listIterator();
+			while (iterator.hasNext()) {
+				final Pair<Placeholders, ? extends LogicalExpression> pair = iterator
+						.next();
+				if (pair.second() != null) {
+					final LogicalExpression newBody = pair.second();
+					if (newBody == lambda.getBody()) {
+						iterator.set(Pair.of(pair.first(), lambda));
+					} else {
+						iterator.set(Pair.of(
+								pair.first(),
+								new Lambda(lambda.getArgument(), newBody,
+										LogicLanguageServices
+												.getTypeRepository())));
+					}
+					
+				}
+			}
+		}
+		
+		@Override
+		public void visit(Literal literal) {
+			// Visit the predicate
+			literal.getPredicate().accept(this);
+			final List<Pair<Placeholders, ? extends LogicalExpression>> predicateReturn = tempReturn;
+			
+			final List<LogicalExpression> args = new ArrayList<LogicalExpression>(
+					literal.getArguments());
+			
+			final List<List<Pair<Placeholders, ? extends LogicalExpression>>> argReturns = new ArrayList<List<Pair<Placeholders, ? extends LogicalExpression>>>(
+					args.size());
+			
+			// If the predicate is order-insensitive, sort the arguments by
+			// hash code, so the abstraction of constants is insensitive to
+			// argument order when that order doesn't matter. TODO [yoav]
+			// [urgent] this solution is still not perfect and might create
+			// duplicate templates/lexemes where none should exist. To fix it,
+			// we need to change lexemes to hold a set of constants rather than
+			// a list (which, in turn, will complicate template initialization)
+			if (!literal.getPredicateType().isOrderSensitive()) {
+				Collections.sort(args, new Comparator<LogicalExpression>() {
+					public int compare(LogicalExpression l1,
+							LogicalExpression l2) {
+						return l1.hashCode() - l2.hashCode();
+					}
+				});
+			}
+			
+			for (final LogicalExpression arg : args) {
+				arg.accept(this);
+				argReturns.add(tempReturn);
+			}
+			
+			tempReturn = new LinkedList<Pair<Placeholders, ? extends LogicalExpression>>();
+			
+			if (doMaximal) {
+				// Do the maximal combination by getting all the maximals.
+				// Each returned list should have a single maximal, no more, no
+				// less. The maximal is also removed to make it simpler to do
+				// the partial ones later on.
+				final Pair<Placeholders, ? extends LogicalExpression> predPair = getAndRemoveMaximal(predicateReturn);
+				final List<Pair<Placeholders, ? extends LogicalExpression>> argPairs = ListUtils
+						.map(argReturns,
+								new ListUtils.Mapper<List<Pair<Placeholders, ? extends LogicalExpression>>, Pair<Placeholders, ? extends LogicalExpression>>() {
+									@Override
+									public Pair<Placeholders, ? extends LogicalExpression> process(
+											List<Pair<Placeholders, ? extends LogicalExpression>> obj) {
+										return getAndRemoveMaximal(obj);
+									}
+								});
+				final Placeholders placeholder = predPair.first();
+				int i = 0;
+				boolean argsChanged = false;
+				final List<LogicalExpression> newArgs = new ArrayList<LogicalExpression>(
+						args.size());
+				for (final Pair<Placeholders, ? extends LogicalExpression> argPair : argPairs) {
+					placeholder.concat(argPair.first());
+					newArgs.add(argPair.second());
+					if (args.get(i) != argPair.second()) {
+						argsChanged = true;
+					}
+					++i;
+				}
+				if (argsChanged || predPair.second() != literal.getPredicate()) {
+					tempReturn
+							.add(Pair.of(
+									placeholder,
+									new Literal(
+											predPair.second() == literal
+													.getPredicate() ? literal
+													.getPredicate() : predPair
+													.second(), newArgs,
+											LogicLanguageServices
+													.getTypeComparator(),
+											LogicLanguageServices
+													.getTypeRepository())));
+				} else {
+					tempReturn.add(Pair.of(placeholder, literal));
+				}
+				
+			}
+			
+			if (doPartial) {
+				// At this point, if maximal pairs were present, they were
+				// removed
+				for (final Pair<Placeholders, ? extends LogicalExpression> predPair : predicateReturn) {
+					for (final List<Pair<Placeholders, ? extends LogicalExpression>> argPairs : CollectionUtils
+							.cartesianProduct(argReturns)) {
+						final Placeholders placeholder = new Placeholders();
+						placeholder.concat(predPair.first());
+						int i = 0;
+						boolean argsChanged = false;
+						final List<LogicalExpression> newArgs = new ArrayList<LogicalExpression>(
+								args.size());
+						boolean fail = false;
+						for (final Pair<Placeholders, ? extends LogicalExpression> argPair : argPairs) {
+							if (placeholder.size() + argPair.first().size() <= partialMaxConstants) {
+								placeholder.concat(argPair.first());
+								newArgs.add(argPair.second());
+								if (args.get(i) != argPair.second()) {
+									argsChanged = true;
+								}
+								++i;
+							} else {
+								fail = true;
+								break;
+							}
+						}
+						if (!fail) {
+							if (argsChanged
+									|| predPair.second() != literal
+											.getPredicate()) {
+								tempReturn
+										.add(Pair.of(
+												placeholder,
+												new Literal(
+														predPair.second() == literal
+																.getPredicate() ? literal
+																.getPredicate()
+																: predPair
+																		.second(),
+														newArgs,
+														LogicLanguageServices
+																.getTypeComparator(),
+														LogicLanguageServices
+																.getTypeRepository())));
+							} else {
+								tempReturn.add(Pair.of(placeholder, literal));
+							}
+						}
+					}
+				}
+			}
+		}
+		
+		@Override
+		public void visit(LogicalConstant logicalConstant) {
+			if (FactoredLexiconServices.isFactorable(logicalConstant)) {
+				tempReturn = new ArrayList<Pair<Placeholders, ? extends LogicalExpression>>(
+						3);
+				
+				if (doPartial) {
+					// No factoring (empty) placeholder
+					final Pair<Placeholders, ? extends LogicalExpression> noFactoringPair = Pair
+							.of(new Placeholders(), logicalConstant);
+					tempReturn.add(noFactoringPair);
+					// Partial factoring placeholder
+					final Placeholders factoringPlaceholder = new Placeholders();
+					final Pair<Placeholders, ? extends LogicalExpression> factoringPair = Pair
+							.of(factoringPlaceholder,
+									factoringPlaceholder.add(logicalConstant));
+					tempReturn.add(factoringPair);
+				}
+				
+				if (doMaximal) {
+					// Maximal factoring placeholder
+					final Placeholders factoringPlaceholder = new Placeholders(
+							true);
+					final Pair<Placeholders, ? extends LogicalExpression> factoringPair = Pair
+							.of(factoringPlaceholder,
+									factoringPlaceholder.add(logicalConstant));
+					tempReturn.add(factoringPair);
+				}
+				final Type genType = LogicLanguageServices.getTypeRepository()
+						.generalizeType(logicalConstant.getType());
+				if (counters.containsKey(genType)) {
+					counters.get(genType).inc();
+				} else {
+					counters.put(genType, new Counter(1));
+				}
+			} else {
+				// No factoring, only empty placeholders
+				
+				tempReturn = new ArrayList<Pair<Placeholders, ? extends LogicalExpression>>(
+						2);
+				
+				if (doPartial) {
+					// No factoring (empty) placeholder
+					final Pair<Placeholders, ? extends LogicalExpression> noFactoringPair = Pair
+							.of(new Placeholders(), logicalConstant);
+					tempReturn.add(noFactoringPair);
+				}
+				
+				if (doMaximal) {
+					// Maximal factoring (empty) placeholder
+					final Pair<Placeholders, ? extends LogicalExpression> factoringPair = Pair
+							.of(new Placeholders(true), logicalConstant);
+					tempReturn.add(factoringPair);
+				}
+				
+			}
+		}
+		
+		@Override
+		public void visit(LogicalExpression logicalExpression) {
+			logicalExpression.accept(this);
+		}
+		
+		@Override
+		public void visit(Variable variable) {
+			tempReturn = new ArrayList<Pair<Placeholders, ? extends LogicalExpression>>(
+					2);
+			
+			// No factoring (empty) placeholder: maximal
+			if (doMaximal) {
+				final Pair<Placeholders, ? extends LogicalExpression> p = Pair
+						.of(new Placeholders(true), variable);
+				tempReturn.add(p);
+			}
+			
+			// No factoring (empty) placeholder: partial
+			if (doPartial) {
+				final Pair<Placeholders, ? extends LogicalExpression> p = Pair