abstract = {The aim is to build a bridge between several point process models (Poisson, Wold, Hawkes) that have been proved to statistically fit real spike train data and an age-structured partial differential equation known as the Refractory Density Equation and mathematically studied by Pakdaman, Perthame and Salort.},
keywords = {refractory density equation, point process, mean field},
hal = {tel-01415622},
abstract = {This manuscript focuses on biological neural networks and their modelling. It lies at the intersection of three domains of mathematics - the study of partial differential equations (PDE), probability and statistics - and deals with their application to neuroscience. On the one hand, the bridges between two neural network models, involving two different scales, are highlighted. At a microscopic scale, the electrical activity of each neuron is described by a temporal point process. At a larger scale, an age-structured system of PDE gives the global activity. There are two ways to derive the macroscopic model (PDE system) starting from the microscopic one: by studying the mean dynamics of one typical neuron or by investigating the dynamics of a mean-field network of n neurons when n goes to infinity.},
abstract = {A functional central limit theorem is proved for mean-field interacting age-dependent Hawkes processes. It gives a theoretical foundation for the stochastic Refractory Density Equation.},
abstract = {A functional law of large numbers (propagation of chaos) is proved for mean-field interacting age-dependent Hawkes processes. It gives a theoretical foundation for the Refractory Density Equation.},
abstract = {Stimulus sensitivity of the stationary Refractory Density Equation is studied. It appears that the maximal sensitivity is achieved in the sub-critical regime, yet close to criticality, for a range of biologically relevant parameters.},
abstract = {An elementary construction of a uniform decomposition of probability measures in dimension $d \geq 1$ is provided. This decomposition is then used to give upper bounds on the rate of convergence of the optimal uniform approximation error.},
preview = {decomposition.png},
dimensions = {true},
altmetric = {true},
bibtex_show = {true}
@article{chevallier2018mean,
arxiv = {1703.05031},
hal = {hal-01489278},
abstract = {A functional law of large numbers (propagation of chaos) is proved for mean-field interacting Hawkes processes with a spatial structure. It gives a theoretical foundation for the Neural Field Equation.},
abstract = {A functional central limit theorem is proved for mean-field interacting Hawkes processes with a spatial structure. It gives a theoretical foundation for the stochastic Neural Field Equation.},
abstract = {The oscillatory system of interacting Hawkes processes with Erlang memory kernels introduced by Ditlevsen and Löcherbach (Stoch. Process. Appl., 2017) is studied. First, a strong diffusion approximation result is proved. Second, moment bounds for the resulting diffusion are derived. Third, approximation schemes for the diffusion, based on the numerical splitting approach, are proposed. These schemes are proved to converge with mean-square order 1 and to preserve the properties of the diffusion, in particular the hypoellipticity, the ergodicity, and the moment bounds. Finally, the PDMP and the diffusion are compared through numerical experiments, where the PDMP is simulated with an adapted thinning procedure.},
abstract = {A large number $N$ of components are partitioned into two communities (excitatory and inhibitory). They are connected via a directed and weighted Erdös-Rényi random graph (DWER) with unknown parameter $p$. At each time unit, we observe the state of each component: either it sends some signal to its successors (in the directed graph) or it remains silent. In this paper, we show that it is possible to find the communities based only on the activity of the $N$ components observed over $T$ time units. We propose a simple algorithm for which the probability of exact recovery converges to 1 in a specific asymptotic regime.},
abstract = {A large number $N$ of components are partitioned into two communities (excitatory and inhibitory). They are connected via a directed and weighted Erdös-Rényi random graph (DWER) with unknown parameter $p$. At each time unit, we observe the state of each component: either it sends some signal to its successors (in the directed graph) or it remains silent. In this paper, we show that it is possible to infer the connectivity parameter $p$ based only on the activity of the $N$ components observed over $T$ time units. We propose a simple algorithm that estimates the connectivity parameter $p$ at a rate which appears to be optimal in a simpler framework.},
abstract = {This paper establishes the theoretical foundation for statistical applications of an intriguing new type of spatial point processes called critical point processes. We provide explicit expressions for fundamental moment characteristics used in spatial point process statistics like the intensity parameter, the pair correlation function, and higher order intensity functions. The crucial dependence structure (attraction or repulsiveness) of a critical point process is discussed in depth. We propose simulation strategies based on spectral methods or smoothing of grid-based simulations and show that resulting approximate critical point process simulations asymptotically converge to the exact critical point process distribution. Finally, under the increasing domain framework, we obtain asymptotic results for linear and bilinear statistics of a critical point process. In particular, we obtain a multivariate central limit theorem for the intensity parameter estimate and a modified version of Ripley's K-function.},