sa15.bib

@book{Kandel2000,
  abstract = {Now in resplendent color, the new edition continues to define the latest in the scientific understanding of the brain, the nervous system, and human behavior. Each chapter is thoroughly revised and includes the impact of molecular biology in the mechanisms underlying developmental processes and in the pathogenesis of disease. Important features to this edition include a new chapter - Genes and Behavior; a complete updating of development of the nervous system; the genetic basis of neurological and psychiatric disease; cognitive neuroscience of perception, planning, action, motivation and memory; ion channel mechanisms; and much more.},
  author = {Kandel, Eric R and Schwartz, James H and Jessell, Thomas M},
  address = {New York},
  doi = {10.1036/0838577016},
  edition = {4},
  isbn = {0838577016},
  pages = {1414},
  publisher = {McGraw-Hill},
  title = {{Principles of Neural Science}},
  year = {2000}
}
@book{Dayan2001,
  abstract = {Theoretical neuroscience provides a quantitative basis for describing what nervous systems do, determining how they function, and uncovering the general principles by which they operate. This text introduces the basic mathematical and computational methods of theoretical neuroscience and presents applications in a variety of areas including vision, sensory-motor integration, development, learning, and memory. The book is divided into three parts. Part I. discusses the relationship between sensory stimuli and neural responses, focusing on the representation of information by the spiking activity of neurons. Part II discusses the modeling of neurons and neural circuits on the basis of cellular and synaptic biophysics. Part III analyzes the role of plasticity in development and learning. An appendix covers the mathematical methods used, and exercises are available on the book's Web site.},
  author = {Dayan, Peter and Abbott, L F},
  address = {Cambridge, MA},
  isbn = {0262041995},
  pages = {480},
  publisher = {MIT Press},
  title = {{Theoretical Neuroscience: Computational and Mathematical Modeling of Neural Systems}},
  year = {2001}
}
@book{Bear2007,
  abstract = {(from the preface) Neuroscience: Exploring the Brain surveys the organization and function of the human nervous system. We present material at the cutting edge of neuroscience, in a way that is accessible to both science and nonscience students alike. The level of the material is comparable to an introductory college text in general biology. The book is divided into four parts: Part I, Foundations; Part II, Sensory and Motor Systems; Part III, The Brain and Behavior; and Part IV, The Changing Brain. We begin Part I by introducing the modern field of neuroscience and tracing some of its historical antecedents. Then we take a close look at the structure and function of individual neurons, how they communicate chemically, and how these building blocks are arranged to form a nervous system. In Part II, we go inside the brain to examine the structure and function of the systems that serve the senses and command voluntary movements. In Part III, we explore the neurobiology of human behavior, including motivation, sex, emotion, sleep, language, attention, and mental illness. Finally, in Part IV, we look at how the environment modifies the brain, both during development and in adult learning and memory. (PsycINFO Database Record (c) 2012 APA, all rights reserved)},
  author = {Bear, Mark F and Connors, Barry W and Paradiso, Michael A},
  address = {Baltimore, MD},
  edition = {3},
  isbn = {0-7817-6003-8},
  pages = {898},
  publisher = {Lippincott Williams \& Wilkins},
  title = {{Neuroscience: Exploring the Brain}},
  year = {2007}
}
@book{Gerstner2002,
  abstract = {This introduction to spiking neurons can be used in advanced-level courses in computational neuroscience, theoretical biology, neural modeling, biophysics, or neural networks. It focuses on phenomenological approaches rather than detailed models in order to provide the reader with a conceptual framework. The authors formulate the theoretical concepts clearly without many mathematical details. While the book contains standard material for courses in computational neuroscience, neural modeling, or neural networks, it also provides an entry to current research. No prior knowledge beyond undergraduate mathematics is required.},
  author = {Gerstner, Wulfram and Kistler, Werner M},
  address = {Cambridge},
  doi = {10.2277/0511075065},
  isbn = {9780521890793},
  pages = {494},
  publisher = {Cambridge University Press},
  title = {{Spiking Neuron Models: Single Neurons, Populations, Plasticity}},
  year = {2002}
}
@book{Rieke1997,
  abstract = {What does it mean to say that a certain set of spikes is the right answer to a computational problem? In what sense does a spike train convey information about the sensory world? Spikes begins by providing precise formulations of these and related questions about the representation of sensory signals in neural spike trains. The answers to these questions are then pursued in experiments on sensory neurons.Intended for neurobiologists with an interest in mathematical analysis of neural data as well as the growing number of physicists and mathematicians interested in information processing by "real" nervous systems, Spikes provides a self-contained review of relevant concepts in information theory and statistical decision theory.},
  author = {Rieke, F. and Warland, D. and {De Ruyter Van Steveninck}, R. and Bialek, W.},
  address = {Cambridge, MA},
  isbn = {0262181746},
  pages = {395},
  publisher = {MIT Press},
  series = {Computational Neuroscience},
  title = {{Spikes: Exploring the Neural Code}},
  year = {1997}
}
@book{Jacobson2008,
  abstract = {This textbook provides a single text for the undergraduate, and graduate student and for the first and second year},
  author = {Jacobson, Stanley and Marcus, Elliott M},
  address = {New York},
  doi = {10.1007/978-0-387-70971-0},
  isbn = {1441996524},
  pages = {546},
  publisher = {Springer},
  title = {{Neuroanatomy for the Neuroscientist}},
  year = {2008}
}
@book{Molleman2003,
  abstract = {Basic theoretical principles - Requirements - The practice of patch clamping - Whole-cell protocols and data analysis - Single-channel protocols and data analysis},
  author = {Molleman, Areles},
  address = {Chichester},
  doi = {10.1002/0470856521},
  isbn = {047148685X},
  pages = {186},
  publisher = {John Wiley \& Sons},
  title = {{Patch Clamping: An Introductory Guide to Patch Clamp Electrophysiology}},
  year = {2003}
}
@book{Windhorst1999,
  abstract = {This manual provides an overview of the techniques used in modern neuroscience research. The emphasis is on showing how different techniques can optimally be combined in the study of problems that arise at some levels of nervous system organization. It is a working tool for the scientist in the laboratory and clinic, providing detailed step-by-step protocols with tips and recommendations. Most chapters or protocols are organized such that they can be used independently of one another. Cross-references between the chapters, a glossary, a list of suppliers and appendices provide further help.},
  address = {Berlin Heidelberg},
  editor = {Windhorst, U. and Johansson, H.},
  isbn = {3540644601},
  pages = {1325},
  publisher = {Springer},
  title = {{Modern Techniques in Neuroscience Research}},
  url = {http://books.google.com/books?id=cjCnNtIhjMQC\&pgis=1},
  year = {1999}
}
@book{Izhikevich2007,
  abstract = {In order to model neuronal behavior or to interpret the results of modeling studies, neuroscientists must call upon methods of nonlinear dynamics. This book offers an introduction to nonlinear dynamical systems theory for researchers and graduate students in neuroscience. It also provides an overview of neuroscience for mathematicians who want to learn the basic facts of electrophysiology.Dynamical Systems in Neuroscience presents a systematic study of the relationship of electrophysiology, nonlinear dynamics, and computational properties of neurons. It emphasizes that information processing in the brain depends not only on the electrophysiological properties of neurons but also on their dynamical properties. The book introduces dynamical systems, starting with one- and two-dimensional Hodgkin-Huxley-type models and continuing to a description of bursting systems. Each chapter proceeds from the simple to the complex, and provides sample problems at the end. The book explains all necessary mathematical concepts using geometrical intuition; it includes many figures and few equations, making it especially suitable for non-mathematicians. Each concept is presented in terms of both neuroscience and mathematics, providing a link between the two disciplines. Nonlinear dynamical systems theory is at the core of computational neuroscience research, but it is not a standard part of the graduate neuroscience curriculumor taught by math or physics department in a way that is suitable for students of biology. This book offers neuroscience students and researchers a comprehensive account of concepts and methods increasingly used in computational neuroscience.An additional chapter on synchronization, with more advanced material, can be found at the author's website, www.izhikevich.com.},
  author = {Izhikevich, Eugene M},
  address = {Cambridge, MA},
  isbn = {9780262090438},
  pages = {441},
  publisher = {MIT Press},
  title = {{Dynamical Systems in Neuroscience: The Geometry of Excitability and Bursting}},
  year = {2007}
}
@article{Abbott2000,
  abstract = {Synaptic plasticity provides the basis for most models of learning, memory and development in neural circuits. To generate realistic results, synapse-specific Hebbian forms of plasticity, such as long-term potentiation and depression, must be augmented by global processes that regulate overall levels of neuronal and network activity. Regulatory processes are often as important as the more intensively studied Hebbian processes in determining the consequences of synaptic plasticity for network function. Recent experimental results suggest several novel mechanisms for regulating levels of activity in conjunction with Hebbian synaptic modification. We review three of them-synaptic scaling, spike-timing dependent plasticity and synaptic redistribution-and discuss their functional implications.},
  author = {Abbott, L F and Nelson, S B},
  doi = {10.1038/81453},
  issn = {1097-6256},
  journal = {Nat. Neurosci.},
  pages = {1178--1183},
  pmid = {11127835},
  title = {{Synaptic plasticity: taming the beast.}},
  volume = {3 Suppl},
  year = {2000}
}
@article{Citri2008,
  abstract = {Experiences, whether they be learning in a classroom, a stressful event, or ingestion of a psychoactive substance, impact the brain by modifying the activity and organization of specific neural circuitry. A major mechanism by which the neural activity generated by an experience modifies brain function is via modifications of synaptic transmission; that is, synaptic plasticity. Here, we review current understanding of the mechanisms of the major forms of synaptic plasticity at excitatory synapses in the mammalian brain. We also provide examples of the possible developmental and behavioral functions of synaptic plasticity and how maladaptive synaptic plasticity may contribute to neuropsychiatric disorders.},
  author = {Citri, Ami and Malenka, Robert C},
  doi = {10.1038/sj.npp.1301559},
  issn = {0893-133X},
  journal = {Neuropsychopharmacology},
  keywords = {Animals,Models, Neurological,Neuronal Plasticity,Neuronal Plasticity: physiology,Neurons,Neurons: physiology,Receptors, N-Methyl-D-Aspartate,Receptors, N-Methyl-D-Aspartate: physiology,Synapses,Synapses: physiology,Synaptic Transmission,Synaptic Transmission: physiology},
  number = {1},
  pages = {18--41},
  pmid = {17728696},
  title = {{Synaptic plasticity: multiple forms, functions, and mechanisms.}},
  url = {http://www.ncbi.nlm.nih.gov/pubmed/17728696},
  volume = {33},
  year = {2008}
}
@book{Khalil2002,
  abstract = {This book is written is such a way that the level of mathematical sophistication builds up from chapter to chapter. It has been reorganized into four parts: basic analysis, analysis of feedback systems, advanced analysis, and nonlinear feedback control. Updated content includes subjects which have proven useful in nonlinear control design in recent years\&8212;new in the 3rd edition are: expanded treatment of passivity and passivity-based control; integral control, high-gain feedback, recursive methods, optimal stabilizing control, control Lyapunov functions, and observers. For use as a self-study or reference guide by engineers and applied mathematicians.},
  author = {Khalil, H K},
  address = {Upper Saddle River, NJ},
  edition = {3},
  isbn = {0130673897},
  pages = {750},
  publisher = {Prentice Hall},
  title = {{Nonlinear Systems}},
  year = {2002}
}
@book{Strogatz1994,
  abstract = {An introductory text in nonlinear dynamics and chaos, emphasizing applications in several areas of science, which include vibrations, biological rhythms, insect outbreaks, and genetic control systems. Contains a rich selection of illustrations, with many exercises and examples},
  author = {Strogatz, Steven H.},
  address = {Reading, MA},
  isbn = {0738204536},
  pages = {505},
  publisher = {Addison-Wesley},
  title = {{Nonlinear Dynamics and Chaos: With Applications to Physics, Biology, Chemistry, and Engineering}},
  year = {1994}
}
@book{Rojas1996,
  abstract = {Neural networks are a computing paradigm that is finding increasing attention among computer scientists. In this book, theoretical laws and models previously scattered in the literature are brought together into a general theory of artificial neural nets. Always with a view to biology and starting with the simplest nets, it is shown how the properties of models change when more general computing elements and net topologies are introduced. Each chapter contains examples, numerous illustrations, and a bibliography. The book is aimed at readers who seek an overview of the field or who wish to deepen their knowledge. It is suitable as a basis for university courses in neurocomputing.},
  author = {Rojas, R},
  address = {Berlin},
  isbn = {3540605053},
  pages = {502},
  publisher = {Springer},
  title = {{Neural Networks: A Systematic Introduction}},
  year = {1996}
}
@book{Hertz1991,
  abstract = {This book is a comprehensive introduction to the neural network models currently under intensive study for computational applications. It is a detailed, logically-developed treatment that covers the theory and uses of collective computational networks, including associative memory, feed forward networks, and unsupervised learning. It also provides coverage of neural network applications in a variety of problems of both theoretical and practical interest.},
  author = {Hertz, J and Krogh, A and Palmer, R G},
  address = {Redwood City, CA},
  isbn = {0201503956},
  pages = {327},
  publisher = {Addison-Wesley},
  title = {{Introduction to the Theory of Neural Computation}},
  url = {http://books.google.com/books?id=9a\_SyUG-A24C\&pgis=1},
  year = {1991}
}
@book{kumar2004neural,
  author = {Kumar, Satish},
  publisher = {Tata McGraw-Hill Education},
  title = {{Neural Networks: A Classroom Approach}},
  year = {2004}
}
@article{Schneidman2003,
  title = {Network Information and Connected Correlations},
  author = {Schneidman, Elad and Still, Susanne and Berry, Michael J. and Bialek, William},
  journal = {Phys. Rev. Lett.},
  volume = {91},
  number = {23},
  pages = {238701},
  numpages = {4},
  year = {2003},
  month = {Dec},
  publisher = {American Physical Society},
  doi = {10.1103/PhysRevLett.91.238701},
  url = {http://link.aps.org/doi/10.1103/PhysRevLett.91.238701}
}
@article{Averbeck2006,
  abstract = {How the brain encodes information in population activity, and how it combines and manipulates that activity as it carries out computations, are questions that lie at the heart of systems neuroscience. During the past decade, with the advent of multi-electrode recording and improved theoretical models, these questions have begun to yield answers. However, a complete understanding of neuronal variability, and, in particular, how it affects population codes, is missing. This is because variability in the brain is typically correlated, and although the exact effects of these correlations are not known, it is known that they can be large. Here, we review studies that address the interaction between neuronal noise and population codes, and discuss their implications for population coding in general.},
  author = {Averbeck, Bruno B and Latham, Peter E and Pouget, Alexandre},
  doi = {10.1038/nrn1888},
  issn = {1471-003X},
  journal = {Nat. Rev. Neurosci.},
  number = {5},
  pages = {358--366},
  pmid = {16760916},
  title = {{Neural correlations, population coding and computation.}},
  volume = {7},
  year = {2006}
}
@article{Borst1999,
  abstract = {Information theory quantifies how much information a neural response carries about the stimulus. This can be compared to the information transferred in particular models of the stimulus-response function and to maximum possible information transfer. Such comparisons are crucial because they validate assumptions present in any neurophysiological analysis. Here we review information-theory basics before demonstrating its use in neural coding. We show how to use information theory to validate simple stimulus-response models of neural coding of dynamic stimuli. Because these models require specification of spike timing precision, they can reveal which time scales contain information in neural coding. This approach shows that dynamic stimuli can be encoded efficiently by single neurons and that each spike contributes to information transmission. We argue, however, that the data obtained so far do not suggest a temporal code, in which the placement of spikes relative to each other yields additional information.},
  author = {Borst, A and Theunissen, F E},
  doi = {10.1038/14731},
  issn = {1097-6256},
  journal = {Nat. Neurosci.},
  number = {11},
  pages = {947--957},
  pmid = {10526332},
  title = {{Information theory and neural coding.}},
  volume = {2},
  year = {1999}
}
@article{Panzeri1999,
  abstract = {Is the information transmitted by an ensemble of neurons determined solely by the number of spikes fired by each cell, or do correlations in the emission of action potentials also play a significant role? We derive a simple formula which enables this question to be answered rigorously for short time-scales. The formula quantifies the corrections to the instantaneous information rate which result from correlations in spike emission between pairs of neurons. The mutual information that the ensemble of neurons conveys about external stimuli can thus be broken down into firing rate and correlation components. This analysis provides fundamental constraints upon the nature of information coding, showing that over short time-scales correlations cannot dominate information representation, that stimulus-independent correlations may lead to synergy (where the neurons together convey more information than they would if they were considered independently), but that only certain combinations of the different sources of correlation result in significant synergy rather than in redundancy or in negligible effects. This analysis leads to a new quantification procedure which is directly applicable to simultaneous multiple neuron recordings.},
  author = {Panzeri, S and Schultz, S R and Treves, A and Rolls, E T},
  doi = {10.1098/rspb.1999.0736},
  issn = {0962-8452},
  journal = {Proc. Biol. Sci.},
  number = {1423},
  pages = {1001--1012},
  pmid = {10610508},
  title = {{Correlations and the encoding of information in the nervous system.}},
  volume = {266},
  year = {1999}
}
@book{Mackay2003,
  abstract = {Information theory and inference, often taught separately, are here united in one entertaining textbook. These topics lie at the heart of many exciting areas of contemporary science and engineering - communication, signal processing, data mining, machine learning, pattern recognition, computational neuroscience, bioinformatics, and cryptography. This textbook introduces theory in tandem with applications. Information theory is taught alongside practical communication systems, such as arithmetic coding for data compression and sparse-graph codes for error-correction. A toolbox of inference techniques, including message-passing algorithms, Monte Carlo methods, and variational approximations, are developed alongside applications of these tools to clustering, convolutional codes, independent component analysis, and neural networks. The final part of the book describes the state of the art in error-correcting codes, including low-density parity-check codes, turbo codes, and digital fountain codes - the twenty-first century standards for satellite communications, disk drives, and data broadcast. Richly illustrated, filled with worked examples and over 400 exercises, some with detailed solutions, David MacKay's groundbreaking book is ideal for self- learning and for undergraduate or graduate courses. Interludes on crosswords, evolution, and sex provide entertainment along the way. In sum, this is a textbook on information, communication, and coding for a new generation of students, and an unparalleled entry point into these subjects for professionals in areas as diverse as computational biology, financial engineering, and machine learning.},
  address = {Cambridge},
  author = {MacKay, David J. C.},
  isbn = {0521642981},
  publisher = {Cambridge University Press},
  title = {{Information Theory, Inference, and Learning Algorithms}},
  year = {2003}
}
@article{Bressloff2011,
  abstract = {We survey recent analytical approaches to studying the spatiotemporal dynamics of continuum neural fields. Neural fields model the large-scale dynamics of spatially structured biological neural networks in terms of nonlinear integrodifferential equations whose associated integral kernels represent the spatial distribution of neuronal synaptic connections. They provide an important example of spatially extended excitable systems with nonlocal interactions and exhibit a wide range of spatially coherent dynamics including traveling waves oscillations and Turing-like patterns.},
  author = {Bressloff, Paul C},
  doi = {10.1088/1751-8113/45/3/033001},
  issn = {1751-8113},
  journal = {Journal of Physics A: Mathematical and Theoretical},
  number = {3},
  pages = {033001},
  title = {{Spatiotemporal dynamics of continuum neural fields}},
  volume = {45},
  year = {2011}
}
@book{Reichl1999,
  abstract = {Going beyond traditional textbook topics, 'A Modern Course in Statistical Physics' incorporates contemporary research in a basic course on statistical mechanics. From the universal nature of matter to the latest results in the spectral properties of decay processes, this book emphasizes the theoretical foundations derived from thermodynamics and probability theory underlying all concepts in statistical physics. This completely revised and updated third edition continues the comprehensive coverage of numerous core topics and special applications, allowing professors flexibility in designing individualized courses. The inclusion of advanced topics and extensive references makes this an invaluable resource for researchers as well as students -- a textbook that will be kept on the shelf long after the course is completed.},
  author = {Reichl, L. E.},
  edition = {2},
  isbn = {978-3-527-40782-8},
  publisher = {Wiley},
  title = {{A Modern Course in Statistical Physics}},
  year = {1999}
}
@book{Schwabl2006,
  abstract = {"Das Buch eignet sich ausgezeichnet als Grundlage oder Erganzungslekture fur eine theoretische Vorlesung ab dem 5. Semester. Es deckt praktisch alle ... ublichen Inhalte ab, geht aber teilweise auch wesentlich daruber hinaus ... Zusammenfassend kann dieses Buch sowohl als Begleittext zu einer Vorlesung wie auch als Nachschlagewerk warmstens empfohlen werden." (Physikalische Blatter) ..". Die Fulle des behandelten Stoffes ist beeindruckend ... kann Studenten der ... Chemie, ... Physik und verwandter Disziplinen nachdrucklich empfohlen werden. Aber auch als Lehr- und Nachschlagewerk ist es geeignet." (Zeitschrift fur Physikalische Chemie)},
  author = {Schwabl, Franz},
  address = {Berlin Heidelberg},
  doi = {10.1007/3-540-31097-5},
  isbn = {9783540310952},
  publisher = {Springer},
  pages = {591},
  title = {{Statistische Mechanik}},
  url = {http://www.springerlink.com/index/10.1007/3-540-31097-5},
  year = {2006}
}
@book{Gerstner2014,
  address = {Cambridge},
  author = {Gerstner, Wulfram and Kistler, Werner M and Naud, Richard and Paninski, Liam},
  isbn = {978-1-107-06083-8},
  publisher = {Cambridge University Press},
  title = {{Neuronal Dynamics: From Single Neurons to Networks and Models of Cognition}},
  year = {2014}
}
@book{Ermentrout2010,
  address = {Berlin Heidelberg},
  author = {Ermentrout, G Bard and Terman, David H},
  isbn = {978-0-387-87707-5},
  publisher = {Springer Science \& Business Media},
  title = {{Mathematical Foundations of Neuroscience}},
  year = {2010}
}
@article{Isaacson2011,
  abstract = {Cortical processing reflects the interplay of synaptic excitation and synaptic inhibition. Rapidly accumulating evidence is highlighting the crucial role of inhibition in shaping spontaneous and sensory-evoked cortical activity and thus underscores how a better knowledge of inhibitory circuits is necessary for our understanding of cortical function. We discuss current views of how inhibition regulates the function of cortical neurons and point to a number of important open questions. ?? 2011 Elsevier Inc.},
  author = {Isaacson, Jeffry S. and Scanziani, Massimo},
  doi = {10.1016/j.neuron.2011.09.027},
  issn = {08966273},
  journal = {Neuron},
  number = {2},
  pages = {231--243},
  pmid = {22017986},
  publisher = {Elsevier Inc.},
  title = {{How inhibition shapes cortical activity}},
  url = {http://dx.doi.org/10.1016/j.neuron.2011.09.027},
  volume = {72},
  year = {2011}
}
@article{Okun2008,
  abstract = {Temporal and quantitative relations between excitatory and inhibitory inputs in the cortex are central to its activity, yet they remain poorly understood. In particular, a controversy exists regarding the extent of correlation between cortical excitation and inhibition. Using simultaneous intracellular recordings in pairs of nearby neurons in vivo, we found that excitatory and inhibitory inputs are continuously synchronized and correlated in strength during spontaneous and sensory-evoked activities in the rat somatosensory cortex.},
  author = {Okun, Michael and Lampl, Ilan},
  doi = {10.1038/nn.2105},
  issn = {1097-6256},
  journal = {Nature neuroscience},
  number = {5},
  pages = {535--537},
  pmid = {18376400},
  title = {{Instantaneous correlation of excitation and inhibition during ongoing and sensory-evoked activities.}},
  volume = {11},
  year = {2008}
}
@article{Shadlen1998,
  abstract = {Cortical neurons exhibit tremendous variability in the number and temporal distribution of spikes in their discharge patterns. Furthermore, this variability appears to be conserved over large regions of the cerebral cortex, suggesting that it is neither reduced nor expanded from stage to stage within a processing pathway. To investigate the principles underlying such statistical homogeneity, we have analyzed a model of synaptic integration incorporating a highly simplified integrate and fire mechanism with decay. We analyzed a "high-input regime" in which neurons receive hundreds of excitatory synaptic inputs during each interspike interval. To produce a graded response in this regime, the neuron must balance excitation with inhibition. We find that a simple integrate and fire mechanism with balanced excitation and inhibition produces a highly variable interspike interval, consistent with experimental data. Detailed information about the temporal pattern of synaptic inputs cannot be recovered from the pattern of output spikes, and we infer that cortical neurons are unlikely to transmit information in the temporal pattern of spike discharge. Rather, we suggest that quantities are represented as rate codes in ensembles of 50-100 neurons. These column-like ensembles tolerate large fractions of common synaptic input and yet covary only weakly in their spike discharge. We find that an ensemble of 100 neurons provides a reliable estimate of rate in just one interspike interval (10-50 msec). Finally, we derived an expression for the variance of the neural spike count that leads to a stable propagation of signal and noise in networks of neurons-that is, conditions that do not impose an accumulation or diminution of noise. The solution implies that single neurons perform simple algebra resembling averaging, and that more sophisticated computations arise by virtue of the anatomical convergence of novel combinations of inputs to the cortical column from external sources.},
  author = {Shadlen, M N and Newsome, W T},
  issn = {0270-6474},
  journal = {The Journal of Neuroscience},
  keywords = {correlation,interspike interval,neural model,noise,rate code,response variability,spike count variance,synaptic integration,temporal coding,visual cortex},
  number = {10},
  pages = {3870--3896},
  pmid = {9570816},
  title = {{The variable discharge of cortical neurons: implications for connectivity, computation, and information coding.}},
  volume = {18},
  year = {1998}
}
@article{VanVreeswijk1998,
  abstract = {The nature and origin of the temporal irregularity in the electrical activity of cortical neurons in vivo are not well understood. We consider the hypothesis that this irregularity is due to a balance of excitatory and inhibitory currents into the cortical cells. We study a network model with excitatory and inhibitory populations of simple binary units. The internal feedback is mediated by relatively large synaptic strengths, so that the magnitude of the total excitatory and inhibitory feedback is much larger than the neuronal threshold. The connectivity is random and sparse. The mean number of connections per unit is large, though small compared to the total number of cells in the network. The network also receives a large, temporally regular input from external sources. We present an analytical solution of the mean-field theory of this model, which is exact in the limit of large network size. This theory reveals a new cooperative stationary state of large networks, which we term a balanced state. In this state, a balance between the excitatory and inhibitory inputs emerges dynamically for a wide range of parameters, resulting in a net input whose temporal fluctuations are of the same order as its mean. The internal synaptic inputs act as a strong negative feedback, which linearizes the population responses to the external drive despite the strong nonlinearity of the individual cells. This feedback also greatly stabilizes the system's state and enables it to track a time-dependent input on time scales much shorter than the time constant of a single cell. The spatiotemporal statistics of the balanced state are calculated. It is shown that the autocorrelations decay on a short time scale, yielding an approximate Poissonian temporal statistics. The activity levels of single cells are broadly distributed, and their distribution exhibits a skewed shape with a long power-law tail. The chaotic nature of the balanced state is revealed by showing that the evolution of the microscopic state of the network is extremely sensitive to small deviations in its initial conditions. The balanced state generated by the sparse, strong connections is an asynchronous chaotic state. It is accompanied by weak spatial cross-correlations, the strength of which vanishes in the limit of large network size. This is in contrast to the synchronized chaotic states exhibited by more conventional network models with high connectivity of weak synapses.},
  author = {van Vreeswijk, C and Sompolinsky, H},
  doi = {10.1162/089976698300017214},
  issn = {0899-7667},
  journal = {Neural Computation},
  number = {6},
  pages = {1321--1371},
  pmid = {9698348},
  title = {{Chaotic balanced state in a model of cortical circuits.}},
  volume = {10},
  year = {1998}
}
@article{VanVreeswijk1996,
  abstract = {Neurons in the cortex of behaving animals show temporally irregular spiking patterns. The origin of this irregularity and its implications for neural processing are unknown. The hypothesis that the temporal variability in the firing of a neuron results from an approximate balance between its excitatory and inhibitory inputs was investigated theoretically. Such a balance emerges naturally in large networks of excitatory and inhibitory neuronal populations that are sparsely connected by relatively strong synapses. The resulting state is characterized by strongly chaotic dynamics, even when the external inputs to the network are constant in time. Such a network exhibits a linear response, despite the highly nonlinear dynamics of single neurons, and reacts to changing external stimuli on time scales much smaller than the integration time constant of a single neuron.},
  author = {van Vreeswijk, C and Sompolinsky, H},
  doi = {10.1126/science.274.5293.1724},
  issn = {0036-8075},
  journal = {Science},
  number = {5293},
  pages = {1724--1726},
  pmid = {8939866},
  title = {{Chaos in neuronal networks with balanced excitatory and inhibitory activity.}},
  volume = {274},
  year = {1996}
}
@article{Cipra,
  author = {Cipra, Barry A.},
  journal = {The American Mathematical Monthly},
  number = {10},
  pages = {937--959},
  title = {{An Introduction to the Ising Model}},
  volume = {94},
  year = {1987}
}
@article{Schneidman2006,
  abstract = {Biological networks have so many possible states that exhaustive sampling is impossible. Successful analysis thus depends on simplifying hypotheses, but experiments on many systems hint that complicated, higher-order interactions among large groups of elements have an important role. Here we show, in the vertebrate retina, that weak correlations between pairs of neurons coexist with strongly collective behaviour in the responses of ten or more neurons. We find that this collective behaviour is described quantitatively by models that capture the observed pairwise correlations but assume no higher-order interactions. These maximum entropy models are equivalent to Ising models, and predict that larger networks are completely dominated by correlation effects. This suggests that the neural code has associative or error-correcting properties, and we provide preliminary evidence for such behaviour. As a first test for the generality of these ideas, we show that similar results are obtained from networks of cultured cortical neurons.},
  archiveprefix = {arXiv},
  arxivid = {q-bio/0512013},
  author = {Schneidman, Elad and Berry, Michael J and Segev, Ronen and Bialek, William},
  doi = {10.1038/nature04701},
  eprint = {q-bio/0512013},
  issn = {0028-0836},
  journal = {Nature},
  number = {7087},
  pages = {1007--1012},
  pmid = {16625187},
  primaryclass = {q-bio},
  title = {{Weak pairwise correlations imply strongly correlated network states in a neural population.}},
  volume = {440},
  year = {2006}
}
@article{Shemesh2013,
  abstract = {Social behavior in mammals is often studied in pairs under artificial conditions, yet groups may rely on more complicated social structures. Here, we use a novel system for tracking multiple animals in a rich environment to characterize the nature of group behavior and interactions, and show strongly correlated group behavior in mice. We have found that the minimal models that rely only on individual traits and pairwise correlations between animals are not enough to capture group behavior, but that models that include third-order interactions give a very accurate description of the group. These models allow us to infer social interaction maps for individual groups. Using this approach, we show that environmental complexity during adolescence affects the collective group behavior of adult mice, in particular altering the role of high-order structure. Our results provide new experimental and mathematical frameworks for studying group behavior and social interactions. DOI:http://dx.doi.org/10.7554/eLife.00759.001.},
  author = {Shemesh, Yair and Sztainberg, Yehezkel and Forkosh, Oren and Shlapobersky, Tamar and Chen, Alon and Schneidman, Elad},
  doi = {10.7554/eLife.00759},
  issn = {2050084X},
  journal = {eLife},
  pages = {e00759},
  pmid = {24015357},
  title = {{High-order social interactions in groups of mice}},
  volume = {2},
  year = {2013}
}
@article{Tang2008,
  abstract = {Multineuron firing patterns are often observed, yet are predicted to be rare by models that assume independent firing. To explain these correlated network states, two groups recently applied a second-order maximum entropy model that used only observed firing rates and pairwise interactions as parameters (Schneidman et al., 2006; Shlens et al., 2006). Interestingly, with these minimal assumptions they predicted 90-99\% of network correlations. If generally applicable, this approach could vastly simplify analyses of complex networks. However, this initial work was done largely on retinal tissue, and its applicability to cortical circuits is mostly unknown. This work also did not address the temporal evolution of correlated states. To investigate these issues, we applied the model to multielectrode data containing spontaneous spikes or local field potentials from cortical slices and cultures. The model worked slightly less well in cortex than in retina, accounting for 88 +/- 7\% (mean +/- SD) of network correlations. In addition, in 8 of 13 preparations, the observed sequences of correlated states were significantly longer than predicted by concatenating states from the model. This suggested that temporal dependencies are a common feature of cortical network activity, and should be considered in future models. We found a significant relationship between strong pairwise temporal correlations and observed sequence length, suggesting that pairwise temporal correlations may allow the model to be extended into the temporal domain. We conclude that although a second-order maximum entropy model successfully predicts correlated states in cortical networks, it should be extended to account for temporal correlations observed between states.},
  author = {Tang, Aonan and Jackson, David and Hobbs, Jon and Chen, Wei and Smith, Jodi L and Patel, Hema and Prieto, Anita and Petrusca, Dumitru and Grivich, Matthew I and Sher, Alexander and Hottowy, Pawel and Dabrowski, Wladyslaw and Litke, Alan M and Beggs, John M},
  doi = {10.1523/JNEUROSCI.3359-07.2008},
  issn = {1529-2401},
  journal = {The Journal of Neuroscience},
  keywords = {culture,human tissue,local field potential,microelectrode array,neuronal avalanche,slice},
  number = {2},
  pages = {505--518},
  pmid = {18184793},
  title = {{A maximum entropy model applied to spatial and temporal correlations from cortical networks in vitro.}},
  volume = {28},
  year = {2008}
}
@article{Tkacik2009,
  abstract = {Ising models with pairwise interactions are the least structured, or maximum-entropy, probability distributions that exactly reproduce measured pairwise correlations between spins. Here we use this equivalence to construct Ising models that describe the correlated spiking activity of populations of 40 neurons in the salamander retina responding to natural movies. We show that pairwise interactions between neurons account for observed higher-order correlations, and that for groups of 10 or more neurons pairwise interactions can no longer be regarded as small perturbations in an independent system. We then construct network ensembles that generalize the network instances observed in the experiment, and study their thermodynamic behavior and coding capacity. Based on this construction, we can also create synthetic networks of 120 neurons, and find that with increasing size the networks operate closer to a critical point and start exhibiting collective behaviors reminiscent of spin glasses. We examine closely two such behaviors that could be relevant for neural code: tuning of the network to the critical point to maximize the ability to encode diverse stimuli, and using the metastable states of the Ising Hamiltonian as neural code words.},
  archiveprefix = {arXiv},
  arxivid = {0912.5409},
  author = {Tkacik, Gasper and Schneidman, Elad and Berry, Michael J. and Bialek, William},
  eprint = {0912.5409},
  keywords = {correlation,entropy,information,monte carlo,multi,neural networks},
  title = {{Spin glass models for a network of real neurons}},
  url = {http://arxiv.org/abs/0912.5409},
  year = {2009}
}
@book{Harris2007,
  address = {San Francisco},
  author = {Harris, David and Harris, Sarah},
  isbn = {978-0-123-70497-9},
  publisher = {Morgan Kaufmann Publishers},
  title = {{Digital Design and Computer Architecture}},
  year = {2007}
}
@book{Bishop2006,
  abstract = {The dramatic growth in practical applications for machine learning over the last ten years has been accompanied by many important developments in the underlying algorithms and techniques. For example, Bayesian methods have grown from a specialist niche to become mainstream, while graphical models have emerged as a general framework for describing and applying probabilistic techniques. The practical applicability of Bayesian methods has been greatly enhanced by the development of a range of approximate inference algorithms such as variational Bayes and expectation propagation, while new models based on kernels have had a significant impact on both algorithms and applications. This completely new textbook reflects these recent developments while providing a comprehensive introduction to the fields of pattern recognition and machine learning. It is aimed at advanced undergraduates or first-year PhD students, as well as researchers and practitioners. No previous knowledge of pattern recognition or machine learning concepts is assumed. Familiarity with multivariate calculus and basic linear algebra is required, and some experience in the use of probabilities would be helpful though not essential as the book includes a self-contained introduction to basic probability theory. The book is suitable for courses on machine learning, statistics, computer science, signal processing, computer vision, data mining, and bioinformatics. Extensive support is provided for course instructors, including more than 400 exercises, graded according to difficulty. Example solutions for a subset of the exercises are available from the book web site, while solutions for the remainder can be obtained by instructors from the publisher. The book is supported by a great deal of additional material, and the reader is encouraged to visit the book web site for the latest information. A forthcoming companion volume will deal with practical aspects of pattern recognition and machine learning, and will include free software implementations of the key algorithms along with example data sets and demonstration programs. Christopher Bishop is Assistant Director at Microsoft Research Cambridge, and also holds a Chair in Computer Science at the University of Edinburgh. He is a Fellow of Darwin College Cambridge, and was recently elected Fellow of the Royal Academy of Engineering. The author's previous textbook "Neural Networks for Pattern Recognition" has been widely adopted.},
  address = {New York},
  author = {Bishop, Christopher M},
  isbn = {9780387310732},
  pages = {738},
  publisher = {Springer},
  title = {{Pattern Recognition and Machine Learning}},
  year = {2006}
}
@book{Mitchell1997,
  abstract = {This book covers the field of machine learning, which is the study of algorithms that allow computer programs to automatically improve through experience. The book is intended to support upper level undergraduate and introductory level graduate courses in machine learning.},
  author = {Mitchell, Tom M},
  address = {New York},
  isbn = {0070428077},
  pages = {432},
  publisher = {McGraw-Hill},
  title = {{Machine Learning}},
  year = {1997}
}
@book{Haykin2008,
  abstract = {Fluid and authoritative, this well-organized book represents the first comprehensive treatment of neural networks from an engineering perspective, providing extensive, state-of-the-art coverage that will expose readers to the myriad facets of neural networks and help them appreciate the technology's origin, capabilities, and potential applications.Examines all the important aspects of this emerging technolgy, covering the learning process, back propogation, radial basis functions, recurrent networks, self-organizing systems, modular networks, temporal processing, neurodynamics, and VLSI implementation. Integrates computer experiments throughout to demonstrate how neural networks are designed and perform in practice. Chapter objectives, problems, worked examples, a bibliography, photographs, illustrations, and a thorough glossary all reinforce concepts throughout. New chapters delve into such areas as support vector machines, and reinforcement learning/neurodynamic programming, plus readers will find an entire chapter of case studies to illustrate the real-life, practical applications of neural networks. A highly detailed bibliography is included for easy reference.For professional engineers and research scientists.},
  author = {Haykin, Simon},
  address = {Upper Saddle River, NJ},
  edition = {3},
  isbn = {9780131471399},
  pages = {906},
  publisher = {Pearson Prentice Hall},
  title = {{Neural Networks and Learning Machines}},
  year = {2008}
}
@misc{ScholarpediaCompNeuro,
  title = {{Scholarpedia Computational Neuroscience}},
  url = {http://www.scholarpedia.org/article/Encyclopedia:Computational\_neuroscience}
}
@misc{ScholarpediaCompInt,
  title = {{Scholarpedia Computational Intelligence}},
  url = {http://www.scholarpedia.org/article/Encyclopedia:Computational\_intelligence}
}
@misc{CourseeraAndrewNg,
  author = {Ng, Andrew},
  howpublished = {Coursera online course},
  title = {{Machine Learning}},
  url = {https://www.coursera.org/learn/machine-learning/home/info}
}
@article{Floreano2015,
  author = {Floreano, Dario and Wood, Robert J.},
  doi = {10.1038/nature14542},
  issn = {0028-0836},
  journal = {Nature},
  number = {7553},
  pages = {460--466},
  title = {{Science, technology and the future of small autonomous drones}},
  url = {http://www.nature.com/doifinder/10.1038/nature14542},
  volume = {521},
  year = {2015}
}
@article{LeCun2015,
  author = {LeCun, Yann and Bengio, Yoshua and Hinton, Geoffrey},
  doi = {10.1038/nature14539},
  issn = {0028-0836},
  journal = {Nature},
  number = {7553},
  pages = {436--444},
  title = {{Deep learning}},
  url = {http://www.nature.com/doifinder/10.1038/nature14539},
  volume = {521},
  year = {2015}
}
@article{Littman2015,
  author = {Littman, Michael L.},
  doi = {10.1038/nature14540},
  issn = {0028-0836},
  journal = {Nature},
  number = {7553},
  pages = {445--451},
  title = {{Reinforcement learning improves behaviour from evaluative feedback}},
  url = {http://www.nature.com/doifinder/10.1038/nature14540},
  volume = {521},
  year = {2015}
}
@article{Rus2015,
  author = {Rus, Daniela and Tolley, Michael T.},
  doi = {10.1038/nature14543},
  issn = {0028-0836},
  journal = {Nature},
  number = {7553},
  pages = {467--475},
  title = {{Design, fabrication and control of soft robots}},
  url = {http://www.nature.com/doifinder/10.1038/nature14543},
  volume = {521},
  year = {2015}
}
@article{Russel2015,
  author = {Russell, Stuart},
  journal = {Nature},
  volume = {521},
  title = {{Ethics of artificial intelligence}},
  year = {2015}
}
