@conference{86,
  title = {Learning Markov networks with context-specific independences},
  booktitle = {IEEE 25th International Conference on Tools with Artificial Intelligence (ICTAI)},
  year = {2013},
  month = {Nov},
  keywords = {Algorithm design and analysis, Complexity theory, conditional independences, Context, Context modeling, context-specific independences, CSPC algorithm, Encoding, independence relations, independence-based, independence-based algorithms, independence-based learning approach, Inference algorithms, learning (artificial intelligence), learning structures, log-linear model, machine learning, Markov network structure, Markov networks, Markov processes, Markov random fields, network theory (graphs), Structure learning, undirected graph},
  doi = {10.1109/ICTAI.2013.88},
  author = {Edera, A. and Schl{\"u}ter, F. and Bromberg, F.}
}

@article{170,
  title = {Efficient Markov network discovery using particle filters},
  journal = {Computational Intelligence},
  volume = {25},
  year = {2009},
  month = {11/2009},
  pages = {367--394},
  abstract = {In this paper, we introduce an efficient independence-based algorithm for inducing the Markov network (MN) structure of a domain from the outcomes of independence tests conducted on data. Our algorithm uses a particle filter (sequential Monte Carlo) method to maintain a population of MN structures that represents the posterior probability distribution over structures, given the outcomes of the tests performed so far. This enables us to select, at each step, the maximally informative test to conduct next from a pool of candidates according to information gain, which minimizes the cost of the statistical tests conducted on the data. This makes our approach useful in domains where independence tests are expensive, such as very large and/or distributed data sets. In addition, our method maintains multiple candidate structures weighted by posterior probability, which allows flexibility in the presence of potential errors in the test outcomes.},
  keywords = {graphical model structure learning, Markov networks, particle filters, sequential Monte Carlo},
  doi = {10.1111/j.1467-8640.2009.00347.x},
  url = {http://onlinelibrary.wiley.com/doi/10.1111/j.1467-8640.2009.00347.x/abstract},
  author = {Margaritis, Dimitris and Bromberg, Facundo}
}
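
Note: the following is a minimal, self-contained Python sketch of the loop described in the abstract above, not the authors' implementation. The error rate EPS, the simulated noisy test oracle, the restriction to conditioning sets of size at most one, and the resample-and-mutate rejuvenation step are all assumptions made for this toy illustration.

"""Illustrative sketch of the idea in the entry above: keep a particle
population over candidate Markov-network structures, reweight it by the
outcome of each independence test, and greedily choose the next test by
expected information gain."""
import itertools
import math
import random

random.seed(0)
VARS = list(range(5))    # toy domain with 5 variables
EPS = 0.1                # assumed probability that a test outcome is wrong
N_PARTICLES = 200


def random_structure():
    """A structure is a frozenset of undirected edges over VARS."""
    return frozenset(e for e in itertools.combinations(VARS, 2) if random.random() < 0.4)


def separated(structure, x, y, z):
    """True iff the conditioning set z blocks every path between x and y
    (vertex separation in the undirected graph with z removed)."""
    frontier, seen = [x], {x}
    while frontier:
        u = frontier.pop()
        for a, b in structure:
            v = b if a == u else a if b == u else None
            if v is None or v in z or v in seen:
                continue
            if v == y:
                return False
            seen.add(v)
            frontier.append(v)
    return True


def entropy(p):
    return 0.0 if p in (0.0, 1.0) else -p * math.log2(p) - (1 - p) * math.log2(1 - p)


def info_gain(particles, weights, test):
    """Mutual information between the noisy test outcome and the structure,
    estimated from the current particle approximation of the posterior."""
    x, y, z = test
    p_indep = sum(w * ((1 - EPS) if separated(s, x, y, z) else EPS)
                  for s, w in zip(particles, weights))
    return entropy(p_indep) - entropy(EPS)


# A hidden "true" structure plus a noisy oracle stand in for tests on real data.
TRUE = random_structure()


def run_test(test):
    x, y, z = test
    truth = separated(TRUE, x, y, z)
    return truth if random.random() > EPS else not truth


particles = [random_structure() for _ in range(N_PARTICLES)]
weights = [1.0 / N_PARTICLES] * N_PARTICLES
pool = [(x, y, frozenset(z))
        for x, y in itertools.combinations(VARS, 2)
        for z in [()] + [(v,) for v in VARS if v not in (x, y)]]

for step in range(30):
    # Pick the candidate test whose outcome is expected to be most informative.
    test = max(pool, key=lambda t: info_gain(particles, weights, t))
    pool.remove(test)
    outcome = run_test(test)      # True means "independent"
    # Bayes update: reweight each particle by the likelihood of the outcome.
    weights = [w * ((1 - EPS) if separated(s, *test) == outcome else EPS)
               for s, w in zip(particles, weights)]
    total = sum(weights)
    weights = [w / total for w in weights]
    # Resample (and lightly mutate) when the effective sample size collapses.
    if 1.0 / sum(w * w for w in weights) < N_PARTICLES / 2:
        particles = random.choices(particles, weights=weights, k=N_PARTICLES)
        all_edges = list(itertools.combinations(VARS, 2))
        particles = [s ^ {random.choice(all_edges)} if random.random() < 0.3 else s
                     for s in particles]
        weights = [1.0 / N_PARTICLES] * N_PARTICLES

best = max(set(particles),
           key=lambda s: sum(w for p, w in zip(particles, weights) if p == s))
print("highest-weight structure:", sorted(best))
print("true structure:          ", sorted(TRUE))

Because each test outcome given a structure is Bernoulli with error EPS in this sketch, the greedy information-gain criterion reduces to picking the test whose predicted outcome is closest to fifty-fifty under the current particle weights.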

@article{72,
  title = {A survey on independence-based Markov networks learning},
  journal = {Artificial Intelligence Review},
  volume = {42},
  year = {2012},
  month = {06/2012},
  pages = {1069--1093},
  abstract = {The problem of learning the Markov network structure from data has become increasingly important in machine learning and in many other application fields. Markov networks are probabilistic graphical models, a widely used formalism for handling probability distributions in intelligent systems. This document focuses on a technology called \emph{independence-based} learning, which allows the independence structure of Markov networks to be learned from data in an efficient and sound manner, whenever the dataset is sufficiently large and is a representative sample of the target distribution. In analyzing this technology, the work surveys the current state-of-the-art algorithms, discusses their limitations, and poses a series of open problems where future work may advance the area in terms of quality and efficiency.},
  keywords = {independence-based, Markov networks, Structure learning, survey},
  issn = {1573-7462},
  doi = {10.1007/s10462-012-9346-y},
  url = {http://www.springerlink.com/content/e0l3113827341422},
  author = {Federico Schl{\"u}ter}
}
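
Note: as a companion to the survey entry above, the following toy Python sketch illustrates the generic independence-based (constraint-based) recipe it refers to, in which each potential edge is decided by a conditional-independence test on data. The synthetic chain-shaped dataset, the fixed threshold on empirical conditional mutual information (used here in place of a calibrated significance test), and the pairwise "independent given all other variables" decision rule are assumptions for illustration, not an algorithm analyzed in the survey.

"""Toy sketch of independence-based structure learning: keep an edge (i, j)
only if X_i and X_j look dependent given all other variables."""
import itertools
import math
import random
from collections import Counter

random.seed(0)
N_VARS = 4
THRESHOLD = 0.01   # assumed cutoff standing in for a significance test


def sample_chain(n):
    """Synthetic binary data whose true structure is the chain X0 - X1 - X2 - X3."""
    rows = []
    for _ in range(n):
        x = [random.randint(0, 1)]
        for _ in range(N_VARS - 1):
            # each variable copies its predecessor with probability 0.8
            x.append(x[-1] if random.random() < 0.8 else 1 - x[-1])
        rows.append(tuple(x))
    return rows


def cond_mutual_info(rows, i, j, cond):
    """Empirical I(X_i; X_j | X_cond) in bits."""
    n = len(rows)
    c_xyz = Counter((r[i], r[j], tuple(r[k] for k in cond)) for r in rows)
    c_xz = Counter((r[i], tuple(r[k] for k in cond)) for r in rows)
    c_yz = Counter((r[j], tuple(r[k] for k in cond)) for r in rows)
    c_z = Counter(tuple(r[k] for k in cond) for r in rows)
    mi = 0.0
    for (x, y, z), n_xyz in c_xyz.items():
        mi += (n_xyz / n) * math.log2(n_xyz * c_z[z] / (c_xz[(x, z)] * c_yz[(y, z)]))
    return mi


data = sample_chain(5000)
edges = []
for i, j in itertools.combinations(range(N_VARS), 2):
    rest = tuple(k for k in range(N_VARS) if k not in (i, j))
    # keep the edge only if X_i and X_j look dependent given all other variables
    if cond_mutual_info(data, i, j, rest) > THRESHOLD:
        edges.append((i, j))

print("recovered structure:", edges)   # the chain (0, 1), (1, 2), (2, 3) is expected

Real independence-based learners replace the fixed threshold with a proper statistical test and choose conditioning sets more economically, which is exactly where the sample-size caveat in the abstract becomes relevant.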