@COMMENT This file was generated by bib2html.pl <https://sourceforge.net/projects/bib2html/> version 0.94
@COMMENT written by Patrick Riley <http://sourceforge.net/users/patstg/>
@COMMENT This file came from Kuldeep S. Meel's publication pages at
@COMMENT http://www.comp.nus.edu.sg/~meel/publications/
@inproceedings{XXKMS19,
  title            = {Embedding Symbolic Knowledge into Deep Networks},
  author           = {Xie, Yaqi and Xu, Ziwei and Kankanhalli, Mohan S. and Meel, Kuldeep S. and Soh, Harold},
  booktitle        = NIPS,
  year             = {2019},
  month            = dec,
  bib2html_dl_pdf  = {https://arxiv.org/abs/1909.01161},
  code             = {https://github.com/ZiweiXU/LENSR},
  bib2html_pubtype = {Refereed Conference},
  bib2html_rescat  = {Formal Methods 4 ML},
  abstract         = {In this work, we aim to leverage prior symbolic knowledge to improve the
    performance of deep models. We propose a graph embedding network that projects
    propositional formulae (and assignments) onto a manifold via an augmented Graph
    Convolutional Network (GCN). To generate semantically-faithful embeddings, we
    develop techniques to recognize node heterogeneity, and semantic regularization
    that incorporate structural constraints into the embedding. Experiments show
    that our approach improves the performance of models trained to perform
    entailment checking and visual relation prediction. Interestingly, we observe a
    connection between the tractability of the propositional theory representation
    and the ease of embedding. Future exploration of this connection may elucidate
    the relationship between knowledge compilation and vector representation
    learning.},
}
