@article{10.3389/frobt.2017.00010,
  title    = {A Package for Measuring Emergence, Self-organization, and Complexity Based on Shannon Entropy},
  journal  = {Frontiers in Robotics and AI},
  volume   = {4},
  year     = {2017},
  pages    = {10},
  abstract = {We present Matlab/Octave functions to calculate measures of emergence, self-organization, and complexity of discrete and continuous data. The measures are based on Shannon's information and differential entropy, respectively. Examples from different datasets and probability distributions are used to illustrate the usage of the code.},
  issn     = {2296-9144},
  doi      = {10.3389/frobt.2017.00010},
  url      = {http://journal.frontiersin.org/article/10.3389/frobt.2017.00010},
  author   = {Santamar{\'\i}a-Bonfil, Guillermo and Gershenson, Carlos and Fern{\'a}ndez, Nelson}
}

@article{CxContinuous2016,
  title    = {Measuring the Complexity of Continuous Distributions},
  journal  = {Entropy},
  volume   = {18},
  number   = {3},
  year     = {2016},
  pages    = {72},
  abstract = {We extend previously proposed measures of complexity, emergence, and self-organization to continuous distributions using differential entropy. Given that the measures were based on Shannon's information, the novel continuous complexity measures describe how a system's predictability changes in terms of the probability distribution parameters. This allows us to calculate the complexity of phenomena for which distributions are known. We find that a broad range of common parameters found in Gaussian and scale-free distributions present high complexity values. We also explore the relationship between our measure of complexity and information adaptation.},
  issn     = {1099-4300},
  doi      = {10.3390/e18030072},
  url      = {http://www.mdpi.com/1099-4300/18/3/72},
  author   = {Santamar{\'\i}a-Bonfil, Guillermo and Fern{\'a}ndez, Nelson and Gershenson, Carlos}
}