Preprints
[1] 
Peherstorfer, B. Multifidelity Monte Carlo estimation with adaptive low-fidelity models. University of Wisconsin-Madison, Technical Report, 2017. [BibTeX]@techreport{P17AMFMC,
  title       = {Multifidelity {Monte Carlo} estimation with adaptive low-fidelity models},
  author      = {Peherstorfer, B.},
  type        = {Technical Report},
  year        = {2017},
  institution = {University of Wisconsin-Madison},
}
[2] 
Kramer, B., Marques, A., Peherstorfer, B., Villa, U. & Willcox, K. Multifidelity probability estimation via fusion of estimators. Massachusetts Institute of Technology, ACDL TR-2017-3, 2017. [BibTeX]@techreport{KMPVW17Fusion,
  title       = {Multifidelity probability estimation via fusion of estimators},
  author      = {Kramer, B. and Marques, A. and Peherstorfer, B. and Villa, U. and Willcox, K.},
  type        = {Technical Report},
  number      = {ACDL TR-2017-3},
  year        = {2017},
  institution = {Massachusetts Institute of Technology},
}


Journal publications
[1] 
Peherstorfer, B., Kramer, B. & Willcox, K. Multifidelity preconditioning of the cross-entropy method for rare event simulation and failure probability estimation. SIAM/ASA Journal on Uncertainty Quantification, 6(2):737–761, 2018. [Abstract]Abstract Accurately estimating rare event probabilities with Monte Carlo can become costly if for each sample a computationally expensive high-fidelity model evaluation is necessary to approximate the system response. Variance reduction with importance sampling significantly reduces the number of required samples if a suitable biasing density is used. This work introduces a multifidelity approach that leverages a hierarchy of low-cost surrogate models to efficiently construct biasing densities for importance sampling. Our multifidelity approach is based on the cross-entropy method that derives a biasing density via an optimization problem. We approximate the solution of the optimization problem at each level of the surrogate-model hierarchy, reusing the densities found on the previous levels to precondition the optimization problem on the subsequent levels. With the preconditioning, an accurate approximation of the solution of the optimization problem at each level can be obtained from a few model evaluations only. In particular, at the highest level, only few evaluations of the computationally expensive high-fidelity model are necessary. Our numerical results demonstrate that our multifidelity approach achieves speedups of several orders of magnitude in a thermal and a reacting-flow example compared to the single-fidelity cross-entropy method that uses a single model alone. [BibTeX]@article{PKW17MFCE,
  title   = {Multifidelity preconditioning of the cross-entropy method for rare event simulation and failure probability estimation},
  author  = {Peherstorfer, B. and Kramer, B. and Willcox, K.},
  journal = {SIAM/ASA Journal on Uncertainty Quantification},
  volume  = {6},
  number  = {2},
  pages   = {737--761},
  year    = {2018},
}
[2] 
Peherstorfer, B., Gunzburger, M. & Willcox, K. Convergence analysis of multifidelity Monte Carlo estimation. Numerische Mathematik, 139(3):683–707, 2018. [Abstract]Abstract The multifidelity Monte Carlo method provides a general framework for combining cheap low-fidelity approximations of an expensive high-fidelity model to accelerate the Monte Carlo estimation of statistics of the high-fidelity model output. In this work, we investigate the properties of multifidelity Monte Carlo estimation in the setting where a hierarchy of approximations can be constructed with known error and cost bounds. Our main result is a convergence analysis of multifidelity Monte Carlo estimation, for which we prove a bound on the costs of the multifidelity Monte Carlo estimator under assumptions on the error and cost bounds of the low-fidelity approximations. The assumptions that we make are typical in the setting of similar Monte Carlo techniques. Numerical experiments illustrate the derived bounds. [BibTeX]@article{PWK16MFMCAsymptotics,
  title   = {Convergence analysis of multifidelity {Monte Carlo} estimation},
  author  = {Peherstorfer, B. and Gunzburger, M. and Willcox, K.},
  journal = {Numerische Mathematik},
  volume  = {139},
  number  = {3},
  pages   = {683--707},
  year    = {2018},
}
[3] 
Qian, E., Peherstorfer, B., O'Malley, D., Vesselinov, V. V. & Willcox, K. Multifidelity Monte Carlo Estimation of Variance and Sensitivity Indices. SIAM/ASA Journal on Uncertainty Quantification, 6(2):683–706, 2018. [Abstract]Abstract Variance-based sensitivity analysis provides a quantitative measure of how uncertainty in a model input contributes to uncertainty in the model output. Such sensitivity analyses arise in a wide variety of applications and are typically computed using Monte Carlo estimation, but the many samples required for Monte Carlo to be sufficiently accurate can make these analyses intractable when the model is expensive. This work presents a multifidelity approach for estimating sensitivity indices that leverages cheaper low-fidelity models to reduce the cost of sensitivity analysis while retaining accuracy guarantees via recourse to the original, expensive model. This paper develops new multifidelity estimators for variance and for the Sobol' main and total effect sensitivity indices. We discuss strategies for dividing limited computational resources among models and specify a recommended strategy. Results are presented for the Ishigami function and a convection-diffusion-reaction model that demonstrate up to 10x speedups for fixed convergence levels. For the problems tested, the multifidelity approach allows inputs to be definitively ranked in importance when Monte Carlo alone fails to do so. [BibTeX]@article{QPOVW17MFGSA,
  title   = {Multifidelity {Monte Carlo} Estimation of Variance and Sensitivity Indices},
  author  = {Qian, E. and Peherstorfer, B. and O'Malley, D. and Vesselinov, V. V. and Willcox, K.},
  journal = {SIAM/ASA Journal on Uncertainty Quantification},
  volume  = {6},
  number  = {2},
  pages   = {683--706},
  year    = {2018},
}
[4] 
Baptista, R., Marzouk, Y., Willcox, K. & Peherstorfer, B. Optimal Approximations of Coupling in Multidisciplinary Models. AIAA Journal, 56:2412–2428, 2018. [Abstract]Abstract This paper presents a methodology for identifying important discipline couplings in multicomponent engineering systems. Coupling among disciplines contributes significantly to the computational cost of analyzing a system, and can become particularly burdensome when coupled analyses are embedded within a design or optimization loop. In many cases, disciplines may be weakly coupled, so that some of the coupling or interaction terms can be neglected without significantly impacting the accuracy of the system output. Typical practice derives such approximations in an ad hoc manner using expert opinion and domain experience. This work proposes a new approach that formulates an optimization problem to find a model that optimally balances accuracy of the model outputs with the sparsity of the discipline couplings. An adaptive sequential Monte Carlo sampling-based technique is used to efficiently search the combinatorial model space of different discipline couplings. An algorithm for selecting an optimal model is presented and illustrated in a fire detection satellite model and a turbine engine cycle analysis model. [BibTeX]@article{AIAADecouple18Baptista,
  title   = {Optimal Approximations of Coupling in Multidisciplinary Models},
  author  = {Baptista, R. and Marzouk, Y. and Willcox, K. and Peherstorfer, B.},
  journal = {AIAA Journal},
  volume  = {56},
  pages   = {2412--2428},
  year    = {2018},
}
[5] 
Zimmermann, R., Peherstorfer, B. & Willcox, K. Geometric subspace updates with applications to online adaptive nonlinear model reduction. SIAM Journal on Matrix Analysis and Applications, 39(1):234–261, 2018. [Abstract]Abstract In many scientific applications, including model reduction and image processing, subspaces are used as ansatz spaces for the low-dimensional approximation and reconstruction of the state vectors of interest. We introduce a procedure for adapting an existing subspace based on information from the least-squares problem that underlies the approximation problem of interest such that the associated least-squares residual vanishes exactly. The method builds on a Riemannian optimization procedure on the Grassmann manifold of low-dimensional subspaces, namely the Grassmannian Rank-One Subspace Estimation (GROUSE). We establish for GROUSE a closed-form expression for the residual function along the geodesic descent direction. Specific applications of subspace adaptation are discussed in the context of image processing and model reduction of nonlinear partial differential equation systems. [BibTeX]@article{ZPW17SIMAXManifold,
  title   = {Geometric subspace updates with applications to online adaptive nonlinear model reduction},
  author  = {Zimmermann, R. and Peherstorfer, B. and Willcox, K.},
  journal = {SIAM Journal on Matrix Analysis and Applications},
  volume  = {39},
  number  = {1},
  pages   = {234--261},
  year    = {2018},
}
[6] 
Peherstorfer, B., Willcox, K. & Gunzburger, M. Survey of multifidelity methods in uncertainty propagation, inference, and optimization. SIAM Review, 2017. (accepted). [Abstract]Abstract In many situations across computational science and engineering, multiple computational models are available that describe a system of interest. These different models have varying evaluation costs and varying fidelities. Typically, a computationally expensive high-fidelity model describes the system with the accuracy required by the current application at hand, while lower-fidelity models are less accurate but computationally cheaper than the high-fidelity model. Outer-loop applications, such as optimization, inference, and uncertainty quantification, require multiple model evaluations at many different inputs, which often leads to computational demands that exceed available resources if only the high-fidelity model is used. This work surveys multifidelity methods that accelerate the solution of outer-loop applications by combining high-fidelity and low-fidelity model evaluations, where the low-fidelity evaluations arise from an explicit low-fidelity model (e.g., a simplified physics approximation, a reduced model, a data-fit surrogate, etc.) that approximates the same output quantity as the high-fidelity model. The overall premise of these multifidelity methods is that low-fidelity models are leveraged for speedup while the high-fidelity model is kept in the loop to establish accuracy and/or convergence guarantees. We categorize multifidelity methods according to three classes of strategies: adaptation, fusion, and filtering. The paper reviews multifidelity methods in the outer-loop contexts of uncertainty propagation, inference, and optimization. [BibTeX]@article{PWG17MultiSurvey,
  title   = {Survey of multifidelity methods in uncertainty propagation, inference, and optimization},
  author  = {Peherstorfer, B. and Willcox, K. and Gunzburger, M.},
  journal = {SIAM Review},
  year    = {2017},
  note    = {Accepted for publication},
}
[7] 
Peherstorfer, B., Gugercin, S. & Willcox, K. Data-driven reduced model construction with time-domain Loewner models. SIAM Journal on Scientific Computing, 39(5):A2152–A2178, 2017. [Abstract]Abstract This work presents a data-driven nonintrusive model reduction approach for large-scale time-dependent systems with linear state dependence. Traditionally, model reduction is performed in an intrusive projection-based framework, where the operators of the full model are required either explicitly in an assembled form or implicitly through a routine that returns the action of the operators on a vector. Our nonintrusive approach constructs reduced models directly from trajectories of the inputs and outputs of the full model, without requiring the full-model operators. These trajectories are generated by running a simulation of the full model; our method then infers frequency-response data from these simulated time-domain trajectories and uses the data-driven Loewner framework to derive a reduced model. Only a single time-domain simulation is required to derive a reduced model with the new data-driven nonintrusive approach. We demonstrate our model reduction method on several benchmark examples and a finite element model of a cantilever beam; our approach recovers the classical Loewner reduced models and, for these problems, yields high-quality reduced models despite treating the full model as a black box. [BibTeX]@article{PSW16TLoewner,
  title   = {Data-driven reduced model construction with time-domain {Loewner} models},
  author  = {Peherstorfer, B. and Gugercin, S. and Willcox, K.},
  journal = {SIAM Journal on Scientific Computing},
  volume  = {39},
  number  = {5},
  pages   = {A2152--A2178},
  year    = {2017},
}
[8] 
Peherstorfer, B., Kramer, B. & Willcox, K. Combining multiple surrogate models to accelerate failure probability estimation with expensive high-fidelity models. Journal of Computational Physics, 341:61–75, 2017. [Abstract]Abstract In failure probability estimation, importance sampling constructs a biasing distribution that targets the failure event such that a small number of model evaluations is sufficient to achieve a Monte Carlo estimate of the failure probability with an acceptable accuracy; however, the construction of the biasing distribution often requires a large number of model evaluations, which can become computationally expensive. We present a mixed multifidelity importance sampling (MMFIS) approach that leverages computationally cheap but erroneous surrogate models for the construction of the biasing distribution and that uses the original high-fidelity model to guarantee unbiased estimates of the failure probability. The key property of our MMFIS estimator is that it can leverage multiple surrogate models for the construction of the biasing distribution, instead of a single surrogate model alone. We show that our MMFIS estimator has a mean-squared error that is up to a constant lower than the mean-squared errors of the corresponding estimators that uses any of the given surrogate models alone—even in settings where no information about the approximation qualities of the surrogate models is available. In particular, our MMFIS approach avoids the problem of selecting the surrogate model that leads to the estimator with the lowest mean-squared error, which is challenging if the approximation quality of the surrogate models is unknown. We demonstrate our MMFIS approach on numerical examples, where we achieve orders of magnitude speedups compared to using the high-fidelity model only. [BibTeX]@article{PKW16MixedMFIS,
  title   = {Combining multiple surrogate models to accelerate failure probability estimation with expensive high-fidelity models},
  author  = {Peherstorfer, B. and Kramer, B. and Willcox, K.},
  journal = {Journal of Computational Physics},
  volume  = {341},
  pages   = {61--75},
  year    = {2017},
}
[9] 
Kramer, B., Peherstorfer, B. & Willcox, K. Feedback Control for Systems with Uncertain Parameters Using Online-Adaptive Reduced Models. SIAM Journal on Applied Dynamical Systems, 16(3):1563–1586, 2017. [Abstract]Abstract We consider control and stabilization for large-scale dynamical systems with uncertain, time-varying parameters. The time-critical task of controlling a dynamical system poses major challenges: Using large-scale models is prohibitive, and accurately inferring parameters can be expensive, too. We address both problems by proposing an offline-online strategy for controlling systems with time-varying parameters. During the offline phase, we use a high-fidelity model to compute a library of optimal feedback controller gains over a sampled set of parameter values. Then, during the online phase, in which the uncertain parameter changes over time, we learn a reduced-order model from system data. The learned reduced-order model is employed within an optimization routine to update the feedback control throughout the online phase. Since the system data naturally reflects the uncertain parameter, the data-driven updating of the controller gains is achieved without an explicit parameter estimation step. We consider two numerical test problems in the form of partial differential equations: a convection-diffusion system, and a model for flow through a porous medium. We demonstrate on those models that the proposed method successfully stabilizes the system model in the presence of process noise. [BibTeX]@article{KPW16ControlAdaptROM,
  title   = {Feedback Control for Systems with Uncertain Parameters Using Online-Adaptive Reduced Models},
  author  = {Kramer, B. and Peherstorfer, B. and Willcox, K.},
  journal = {SIAM Journal on Applied Dynamical Systems},
  volume  = {16},
  number  = {3},
  pages   = {1563--1586},
  year    = {2017},
}
[10] 
Peherstorfer, B., Willcox, K. & Gunzburger, M. Optimal model management for multifidelity Monte Carlo estimation. SIAM Journal on Scientific Computing, 38(5):A3163–A3194, 2016. [Abstract]Abstract This work presents an optimal model management strategy that exploits multifidelity surrogate models to accelerate the estimation of statistics of outputs of computationally expensive high-fidelity models. Existing acceleration methods typically exploit a multilevel hierarchy of surrogate models that follow a known rate of error decay and computational costs; however, a general collection of surrogate models, which may include projection-based reduced models, data-fit models, support vector machines, and simplified-physics models, does not necessarily give rise to such a hierarchy. Our multifidelity approach provides a framework to combine an arbitrary number of surrogate models of any type. Instead of relying on error and cost rates, an optimization problem balances the number of model evaluations across the high-fidelity and surrogate models with respect to error and costs. We show that a unique analytic solution of the model management optimization problem exists under mild conditions on the models. Our multifidelity method makes occasional recourse to the high-fidelity model; in doing so it provides an unbiased estimator of the statistics of the high-fidelity model, even in the absence of error bounds and error estimators for the surrogate models. Numerical experiments with linear and nonlinear examples show that speedups by orders of magnitude are obtained compared to Monte Carlo estimation that invokes a single model only. [BibTeX]@article{Peherstorfer15Multi,
  title   = {Optimal model management for multifidelity {Monte Carlo} estimation},
  author  = {Peherstorfer, B. and Willcox, K. and Gunzburger, M.},
  journal = {SIAM Journal on Scientific Computing},
  volume  = {38},
  number  = {5},
  pages   = {A3163--A3194},
  year    = {2016},
}
[11] 
Peherstorfer, B. & Willcox, K. Data-driven operator inference for nonintrusive projection-based model reduction. Computer Methods in Applied Mechanics and Engineering, 306:196–215, 2016. [Abstract]Abstract This work presents a nonintrusive projection-based model reduction approach for full models based on time-dependent partial differential equations. Projection-based model reduction constructs the operators of a reduced model by projecting the equations of the full model onto a reduced space. Traditionally, this projection is intrusive, which means that the full-model operators are required either explicitly in an assembled form or implicitly through a routine that returns the action of the operators on a given vector; however, in many situations the full model is given as a black box that computes trajectories of the full-model states and outputs for given initial conditions and inputs, but does not provide the full-model operators. Our nonintrusive operator inference approach infers approximations of the reduced operators from the initial conditions, inputs, trajectories of the states, and outputs of the full model, without requiring the full-model operators. Our operator inference is applicable to full models that are linear in the state or have a low-order polynomial nonlinear term. The inferred operators are the solution of a least-squares problem and converge, with sufficient state trajectory data, in the Frobenius norm to the reduced operators that would be obtained via an intrusive projection of the full-model operators. Our numerical results demonstrate operator inference on a linear climate model and on a tubular reactor model with a polynomial nonlinear term of third order. [BibTeX]@article{Peherstorfer16DataDriven,
  title   = {Data-driven operator inference for nonintrusive projection-based model reduction},
  author  = {Peherstorfer, B. and Willcox, K.},
  journal = {Computer Methods in Applied Mechanics and Engineering},
  volume  = {306},
  pages   = {196--215},
  year    = {2016},
}
[12] 
Peherstorfer, B. & Willcox, K. Dynamic data-driven model reduction: Adapting reduced models from incomplete data. Advanced Modeling and Simulation in Engineering Sciences, 3(11), 2016. [Abstract]Abstract This work presents a data-driven online adaptive model reduction approach for systems that undergo dynamic changes. Classical model reduction constructs a reduced model of a large-scale system in an offline phase and then keeps the reduced model unchanged during the evaluations in an online phase; however, if the system changes online, the reduced model may fail to predict the behavior of the changed system. Rebuilding the reduced model from scratch is often too expensive in time-critical and real-time environments. We introduce a dynamic data-driven adaptation approach that adapts the reduced model from incomplete sensor data obtained from the system during the online computations. The updates to the reduced models are derived directly from the incomplete data, without recourse to the full model. Our adaptivity approach approximates the missing values in the incomplete sensor data with gappy proper orthogonal decomposition. These approximate data are then used to derive low-rank updates to the reduced basis and the reduced operators. In our numerical examples, incomplete data with 30–40 percent known values are sufficient to recover the reduced model that would be obtained via rebuilding from scratch. [BibTeX]@article{Peherstorfer16AdaptROM,
  title   = {Dynamic data-driven model reduction: Adapting reduced models from incomplete data},
  author  = {Peherstorfer, B. and Willcox, K.},
  journal = {Advanced Modeling and Simulation in Engineering Sciences},
  volume  = {3},
  number  = {11},
  year    = {2016},
}
[13] 
Peherstorfer, B., Cui, T., Marzouk, Y. & Willcox, K. Multifidelity Importance Sampling. Computer Methods in Applied Mechanics and Engineering, 300:490–509, 2016. [Abstract]Abstract Estimating statistics of model outputs with the Monte Carlo method often requires a large number of model evaluations. This leads to long runtimes if the model is expensive to evaluate. Importance sampling is one approach that can lead to a reduction in the number of model evaluations. Importance sampling uses a biasing distribution to sample the model more efficiently, but generating such a biasing distribution can be difficult and usually also requires model evaluations. A different strategy to speed up Monte Carlo sampling is to replace the computationally expensive high-fidelity model with a computationally cheap surrogate model; however, because the surrogate model outputs are only approximations of the high-fidelity model outputs, the estimate obtained using a surrogate model is in general biased with respect to the estimate obtained using the high-fidelity model. We introduce a multifidelity importance sampling (MFIS) method, which combines evaluations of both the high-fidelity and a surrogate model. It uses a surrogate model to facilitate the construction of the biasing distribution, but relies on a small number of evaluations of the high-fidelity model to derive an unbiased estimate of the statistics of interest. We prove that the MFIS estimate is unbiased even in the absence of accuracy guarantees on the surrogate model itself. The MFIS method can be used with any type of surrogate model, such as projection-based reduced-order models and data-fit models. Furthermore, the MFIS method is applicable to black-box models, i.e., where only inputs and the corresponding outputs of the high-fidelity and the surrogate model are available but not the details of the models themselves.
We demonstrate on nonlinear and time-dependent problems that our MFIS method achieves speedups of up to several orders of magnitude compared to Monte Carlo with importance sampling that uses the high-fidelity model only. [BibTeX]@article{Peherstorfer16MFIS,
  title   = {Multifidelity Importance Sampling},
  author  = {Peherstorfer, B. and Cui, T. and Marzouk, Y. and Willcox, K.},
  journal = {Computer Methods in Applied Mechanics and Engineering},
  volume  = {300},
  pages   = {490--509},
  year    = {2016},
}
[14] 
Peherstorfer, B. & Willcox, K. Online Adaptive Model Reduction for Nonlinear Systems via Low-Rank Updates. SIAM Journal on Scientific Computing, 37(4):A2123–A2150, 2015. [Abstract]Abstract This work presents a nonlinear model reduction approach for systems of equations stemming from the discretization of partial differential equations with nonlinear terms. Our approach constructs a reduced system with proper orthogonal decomposition and the discrete empirical interpolation method (DEIM); however, whereas classical DEIM derives a linear approximation of the nonlinear terms in a static DEIM space generated in an offline phase, our method adapts the DEIM space as the online calculation proceeds and thus provides a nonlinear approximation. The online adaptation uses new data to produce a reduced system that accurately approximates behavior not anticipated in the offline phase. These online data are obtained by querying the full-order system during the online phase, but only at a few selected components to guarantee a computationally efficient adaptation. Compared to the classical static approach, our online adaptive and nonlinear model reduction approach achieves accuracy improvements of up to three orders of magnitude in our numerical experiments with time-dependent and steady-state nonlinear problems. The examples also demonstrate that through adaptivity, our reduced systems provide valid approximations of the full-order systems outside of the parameter domains for which they were initially built in the offline phase. [BibTeX]@article{Peherstorfer15aDEIM,
  title   = {Online Adaptive Model Reduction for Nonlinear Systems via Low-Rank Updates},
  author  = {Peherstorfer, B. and Willcox, K.},
  journal = {SIAM Journal on Scientific Computing},
  volume  = {37},
  number  = {4},
  pages   = {A2123--A2150},
  year    = {2015},
}
[15] 
Peherstorfer, B., Gómez, P. & Bungartz, H.-J. Reduced Models for Sparse Grid Discretizations of the Multi-Asset Black-Scholes Equation. Advances in Computational Mathematics, 41(5):1365–1389, 2015. [Abstract]Abstract This work presents reduced models for pricing basket options with the Black-Scholes and the Heston model. Basket options lead to multidimensional partial differential equations (PDEs) that quickly become computationally infeasible to discretize on full tensor grids. We therefore rely on sparse grid discretizations of the PDEs, which allow us to cope with the curse of dimensionality to some extent. We then derive reduced models with proper orthogonal decomposition. Our numerical results with the Black-Scholes model show that sufficiently accurate results are achieved while gaining speedups between 80 and 160 compared to the high-fidelity sparse grid model for 2, 3, and 4-asset options. For the Heston model, results are presented for a single-asset option that leads to a two-dimensional pricing problem, where we achieve significant speedups with our model reduction approach based on high-fidelity sparse grid models. [BibTeX]@article{pehersto15BlackScholes,
  title   = {Reduced Models for Sparse Grid Discretizations of the Multi-Asset {Black--Scholes} Equation},
  author  = {Peherstorfer, B. and G{\'o}mez, P. and Bungartz, H.-J.},
  journal = {Advances in Computational Mathematics},
  volume  = {41},
  number  = {5},
  pages   = {1365--1389},
  year    = {2015},
}
[16] 
Peherstorfer, B. & Willcox, K. Dynamic Data-Driven Reduced-Order Models. Computer Methods in Applied Mechanics and Engineering, 291:21–41, 2015. [Abstract]Abstract Data-driven model reduction constructs reduced-order models of large-scale systems by learning the system response characteristics from data. Existing methods build the reduced-order models in a computationally expensive offline phase and then use them in an online phase to provide fast predictions of the system. In cases where the underlying system properties are not static but undergo dynamic changes, repeating the offline phase after each system change to rebuild the reduced-order model from scratch forfeits the savings gained in the online phase. This paper proposes dynamic reduced-order models that break with this classical but rigid approach. Dynamic reduced-order models exploit the opportunity presented by dynamic sensor data and adaptively incorporate sensor data during the online phase. This permits online adaptation to system changes while circumventing the expensive rebuilding of the model. A computationally cheap adaptation is achieved by constructing low-rank updates to the reduced operators. With these updates and with sufficient and accurate data, our approach recovers the same model that would be obtained by rebuilding from scratch. We demonstrate dynamic reduced-order models on a structural assessment example in the context of real-time decision making. We consider a plate in bending where the dynamic reduced-order model quickly adapts to changes in structural properties and achieves speedups of four orders of magnitude compared to rebuilding a model from scratch. [BibTeX]@article{pehersto15dynamic,
  title   = {Dynamic Data-Driven Reduced-Order Models},
  author  = {Peherstorfer, B. and Willcox, K.},
  journal = {Computer Methods in Applied Mechanics and Engineering},
  volume  = {291},
  pages   = {21--41},
  year    = {2015},
}
[17] 
Peherstorfer, B., Zimmer, S., Zenger, C. & Bungartz, H.-J. A Multigrid Method for Adaptive Sparse Grids. SIAM Journal on Scientific Computing, 37(5):S51–S70, 2015. [Abstract]Abstract Sparse grids have become an important tool to reduce the number of degrees of freedom of discretizations of moderately high-dimensional partial differential equations; however, the reduction in degrees of freedom comes at the cost of an almost dense and unconventionally structured system of linear equations. To guarantee overall efficiency of the sparse grid approach, special linear solvers are required. We present a multigrid method that exploits the sparse grid structure to achieve an optimal runtime that scales linearly with the number of sparse grid points. Our approach is based on a novel decomposition of the right-hand sides of the coarse grid equations that leads to a reformulation in so-called auxiliary coefficients. With these auxiliary coefficients, the right-hand sides can be represented in a nodal point basis on low-dimensional full grids. Our proposed multigrid method directly operates in this auxiliary coefficient representation, circumventing most of the computationally cumbersome sparse grid structure. Numerical results on nonadaptive and spatially adaptive sparse grids confirm that the runtime of our method scales linearly with the number of sparse grid points and they indicate that the obtained convergence factors are bounded independently of the mesh width. [BibTeX]@article{peherstorfer15htmg,
  title   = {A Multigrid Method for Adaptive Sparse Grids},
  author  = {Peherstorfer, B. and Zimmer, S. and Zenger, C. and Bungartz, H.-J.},
  journal = {SIAM Journal on Scientific Computing},
  volume  = {37},
  number  = {5},
  pages   = {S51--S70},
  year    = {2015},
}
[18] 
Peherstorfer, B., Butnaru, D., Willcox, K. & Bungartz, H.J. Localized Discrete Empirical Interpolation Method. SIAM Journal on Scientific Computing, 36(1):A168A192, 2014. [Abstract]Abstract This paper presents a new approach to construct more efficient reducedorder models for nonlinear partial differential equations with proper orthogonal decomposition and the discrete empirical interpolation method (DEIM). Whereas DEIM projects the nonlinear term onto one global subspace, our localized discrete empirical interpolation method (LDEIM) computes several local subspaces, each tailored to a particular region of characteristic system behavior. Then, depending on the current state of the system, LDEIM selects an appropriate local subspace for the approximation of the nonlinear term. In this way, the dimensions of the local DEIM subspaces, and thus the computational costs, remain low even though the system might exhibit a wide range of behaviors as it passes through different regimes. LDEIM uses machine learning methods in the offline computational phase to discover these regions via clustering. Local DEIM approximations are then computed for each cluster. In the online computational phase, machinelearningbased classification procedures select one of these local subspaces adaptively as the computation proceeds. The classification can be achieved using either the system parameters or a lowdimensional representation of the current state of the system obtained via feature extraction. The LDEIM approach is demonstrated for a reacting flow example of an H_2Air flame. In this example, where the system state has a strong nonlinear dependence on the parameters, the LDEIM provides speedups of two orders of magnitude over standard DEIM. [BibTeX]@article{peherstorfer13localized,
title = {Localized Discrete Empirical Interpolation Method},
author = {Peherstorfer, B. and Butnaru, D. and Willcox, K. and Bungartz, H.-J.},
journal = {SIAM Journal on Scientific Computing},
volume = {36},
number = {1},
pages = {A168--A192},
year = {2014},
} 
[19] 
Peherstorfer, B., Kowitz, C., Pflüger, D. & Bungartz, H.J. Selected Recent Applications of Sparse Grids. Numerical Mathematics: Theory, Methods and Applications, 8(1):4777, 2014. [Abstract]Abstract Sparse grids have become a versatile tool for a vast range of applications reaching from interpolation and numerical quadrature to datadriven problems and uncertainty quantification. We review four selected realworld applications of sparse grids: financial product pricing with the BlackScholes model, interactive exploration of simulation data with sparsegridbased surrogate models, analysis of simulation data through sparse grid data mining methods, and stability investigations of plasma turbulence simulations. [BibTeX]@article{Peherstorfer14SGReview,
title = {Selected Recent Applications of Sparse Grids},
author = {Peherstorfer, B. and Kowitz, C. and Pflüger, D. and Bungartz, H.-J.},
journal = {Numerical Mathematics: Theory, Methods and Applications},
volume = {8},
number = {1},
pages = {47--77},
year = {2014},
} 
[20] 
Pflüger, D., Peherstorfer, B. & Bungartz, H.J. Spatially adaptive sparse grids for highdimensional datadriven problems. Journal of Complexity, 26(5):508522, 2010. [Abstract]Abstract Sparse grids allow one to employ gridbased discretization methods in datadriven problems. We present an extension of the classical sparse grid approach that allows us to tackle highdimensional problems by spatially adaptive refinement, modified ansatz functions, and efficient regularization techniques. The competitiveness of this method is shown for typical benchmark problems with up to 166 dimensions for classification in data mining, pointing out properties of sparse grids in this context. To gain insight into the adaptive refinement and to examine the scope for further improvements, the approximation of nonsmooth indicator functions with adaptive sparse grids has been studied as a model problem. As an example for an improved adaptive grid refinement, we present results for an edgedetection strategy. [BibTeX]@article{pflueger10spatially,
title = {Spatially adaptive sparse grids for highdimensional datadriven problems},
author = {Pflüger, D. and Peherstorfer, B. and Bungartz, H.-J.},
journal = {Journal of Complexity},
volume = {26},
number = {5},
pages = {508--522},
year = {2010},
} 


Conference publications (peerreviewed)
[1] 
Peherstorfer, B., Beran, P.S. & Willcox, K. Multifidelity Monte Carlo estimation for largescale uncertainty propagation. In 2018 AIAA NonDeterministic Approaches Conference, AIAA, 2018. [Abstract]Abstract One important task of uncertainty quantification is propagating input uncertainties through a system of interest to quantify the uncertainties' effects on the system outputs; however, numerical methods for uncertainty propagation are often based on Monte Carlo estimation, which can require large numbers of numerical simulations of the numerical model describing the system response to obtain estimates with acceptable accuracies. Thus, if the model is computationally expensive to evaluate, then MonteCarlobased uncertainty propagation methods can quickly become computationally intractable. We demonstrate that multifidelity methods can significantly speedup uncertainty propagation by leveraging lowcost lowfidelity models and establish accuracy guarantees by using occasional recourse to the expensive highfidelity model. We focus on the multifidelity Monte Carlo method, which is a multifidelity approach that optimally distributes work among the models such that the meansquared error of the multifidelity estimator is minimized for a given computational budget. The multifidelity Monte Carlo method is applicable to general types of lowfidelity models, including projectionbased reduced models, datafit surrogates, response surfaces, and simplifiedphysics models. We apply the multifidelity Monte Carlo method to a coupled aerostructural analysis of a wing and a flutter problem with a highaspectratio wing. The lowfidelity models are datafit surrogate models derived with standard procedures that are built in common software environments such as Matlab and numpy/scipy. Our results demonstrate speedups of orders of magnitude compared to using the highfidelity model alone. [BibTeX]@inproceedings{Peherstorfer18SciTechAIAA,
title = {Multifidelity Monte Carlo estimation for large-scale uncertainty propagation},
author = {Peherstorfer, B. and Beran, P.S. and Willcox, K.},
year = {2018},
booktitle = {2018 AIAA Non-Deterministic Approaches Conference},
publisher = {AIAA},
} 
[2] 
Baptista, R., Marzouk, Y., Willcox, K. & Peherstorfer, B. Optimal Approximations of Coupling in Multidisciplinary Models. In 58th AIAA/ASCE/AHS/ASC Structures, Structural Dynamics, and Materials Conference, AIAA, 2017. [BibTeX]@inproceedings{Ricardo17AIAA,
title = {Optimal Approximations of Coupling in Multidisciplinary Models},
author = {Baptista, R. and Marzouk, Y. and Willcox, K. and Peherstorfer, B.},
year = {2017},
booktitle = {58th AIAA/ASCE/AHS/ASC Structures, Structural Dynamics, and Materials Conference},
publisher = {AIAA},
} 
[3] 
Peherstorfer, B. & Willcox, K. Detecting and Adapting to Parameter Changes for Reduced Models of Dynamic Datadriven Application Systems . In International Conference on Computational Science, Volume 51 of Procedia Computer Science, pages 25532562, Elsevier, 2015. [BibTeX]@inproceedings{Peherstorfer15Detect,
title = {Detecting and Adapting to Parameter Changes for Reduced Models of Dynamic Data-driven Application Systems},
author = {Peherstorfer, B. and Willcox, K.},
volume = {51},
pages = {2553--2562},
year = {2015},
series = {Procedia Computer Science},
booktitle = {International Conference on Computational Science},
publisher = {Elsevier},
} 
[4] 
Geuss, M., Butnaru, D., Peherstorfer, B., Bungartz, H.J. & Lohmann, B. Parametric model order reduction by sparsegridbased interpolation on matrix manifolds for multidimensional parameter spaces. In European Control Conference (ECC) 2014, IEEE, 2014. [BibTeX]@inproceedings{Geuss14SGInterp,
title = {Parametric model order reduction by sparse-grid-based interpolation on matrix manifolds for multidimensional parameter spaces},
author = {Geuss, M. and Butnaru, D. and Peherstorfer, B. and Bungartz, H.-J. and Lohmann, B.},
year = {2014},
booktitle = {European Control Conference (ECC) 2014},
publisher = {IEEE},
} 
[5] 
Peherstorfer, B., Pflüger, D. & Bungartz, H.J. Density Estimation with Adaptive Sparse Grids for Large Data Sets. In SIAM Data Mining 2014, SIAM, 2014. [Abstract]Abstract Nonparametric density estimation is a fundamental problem of statistics and data mining. Even though kernel density estimation is the most widely used method, its performance highly depends on the choice of the kernel bandwidth, and it can become computationally expensive for large data sets. We present an adaptive sparsegridbased density estimation method which discretizes the estimated density function on basis functions centered at grid points rather than on kernels centered at the data points. Thus, the costs of evaluating the estimated density function are independent from the number of data points. We give details on how to estimate density functions on sparse grids and develop a cross validation technique for the parameter selection. We show numerical results to confirm that our sparsegridbased method is wellsuited for large data sets, and, finally, employ our method for the classification of astronomical objects to demonstrate that it is competitive to current kernelbased density estimation approaches with respect to classification accuracy and runtime [BibTeX]@inproceedings{Peherstorfer14Density,
title = {Density Estimation with Adaptive Sparse Grids for Large Data Sets},
author = {Peherstorfer, B. and Pflüger, D. and Bungartz, H.-J.},
year = {2014},
booktitle = {SIAM Data Mining 2014},
publisher = {SIAM},
} 
[6] 
Peherstorfer, B., Franzelin, F., Pflüger, D. & Bungartz, H.J. Classification with Probability Density Estimation on Sparse Grids. In Sparse Grids and Applications 2012, Volume 97 of Lecture Notes in Computational Science and Engineering, 2014. (accepted). [BibTeX]@inproceedings{peherstorfer13classification,
title = {Classification with Probability Density Estimation on Sparse Grids},
author = {Peherstorfer, B. and Franzelin, F. and Pflüger, D. and Bungartz, H.-J.},
volume = {97},
year = {2014},
series = {Lecture Notes in Computational Science and Engineering},
booktitle = {Sparse Grids and Applications 2012},
} 
[7] 
Peherstorfer, B., Adorf, J., Pflüger, D. & Bungartz, H.J. Image Segmentation with Adaptive Sparse Grids. In AI 2013: Advances in Artificial Intelligence, Volume 8272 of Lecture Notes in Computer Science Volume, pages 160165, Springer, 2013. [BibTeX]@inproceedings{peherstorfer13image,
title = {Image Segmentation with Adaptive Sparse Grids},
author = {Peherstorfer, B. and Adorf, J. and Pflüger, D. and Bungartz, H.-J.},
volume = {8272},
pages = {160--165},
year = {2013},
series = {Lecture Notes in Computer Science},
booktitle = {AI 2013: Advances in Artificial Intelligence},
publisher = {Springer},
} 
[8] 
Bohn, B., Garcke, J., IzaTeran, R., Paprotny, A., Peherstorfer, B., Schepsmeier, U. & Thole, C.A. Analysis of car crash simulation data with nonlinear machine learning methods. In International Conference on Computational Science, Volume 18 of Procedia Computer Science, pages 621630, Elsevier, 2013. [BibTeX]@inproceedings{bohn13analysis,
title = {Analysis of car crash simulation data with nonlinear machine learning methods},
author = {Bohn, B. and Garcke, J. and Iza-Teran, R. and Paprotny, A. and Peherstorfer, B. and Schepsmeier, U. and Thole, C.-A.},
volume = {18},
pages = {621--630},
year = {2013},
series = {Procedia Computer Science},
booktitle = {International Conference on Computational Science},
publisher = {Elsevier},
} 
[9] 
Peherstorfer, B., Zimmer, S. & Bungartz, H.J. Model Reduction with the Reduced Basis Method and Sparse Grids. In Sparse Grids and Applications 2011, Volume 88 of Lecture Notes in Computational Science and Engineering, pages 223242, Springer, 2013. [BibTeX]@inproceedings{peherstorfer13model,
title = {Model Reduction with the Reduced Basis Method and Sparse Grids},
author = {Peherstorfer, B. and Zimmer, S. and Bungartz, H.-J.},
volume = {88},
pages = {223--242},
year = {2013},
series = {Lecture Notes in Computational Science and Engineering},
booktitle = {Sparse Grids and Applications 2011},
publisher = {Springer},
} 
[10] 
Butnaru, D., Peherstorfer, B., Pflüger, D. & Bungartz, H.J. Fast Insight into HighDimensional Parametrized Simulation Data. In 11th International Conference on Machine Learning and Applications (ICMLA), pages 265270, IEEE, 2012. [BibTeX]@inproceedings{butnaru12fast,
title = {Fast Insight into HighDimensional Parametrized Simulation Data},
author = {Butnaru, D. and Peherstorfer, B. and Pflüger, D. and Bungartz, H.-J.},
pages = {265--270},
year = {2012},
booktitle = {11th International Conference on Machine Learning and Applications (ICMLA)},
publisher = {IEEE},
} 
[11] 
Peherstorfer, B., Pflüger, D. & Bungartz, H.J. Clustering Based on Density Estimation with Sparse Grids. In KI 2012: Advances in Artificial Intelligence, Volume 7526 of Lecture Notes in Computer Science, pages 131142, Springer, 2012. [BibTeX]@inproceedings{peherstorfer12clustering,
title = {Clustering Based on Density Estimation with Sparse Grids},
author = {Peherstorfer, B. and Pflüger, D. and Bungartz, H.-J.},
volume = {7526},
pages = {131--142},
year = {2012},
series = {Lecture Notes in Computer Science},
booktitle = {KI 2012: Advances in Artificial Intelligence},
publisher = {Springer},
} 
[12] 
Heinecke, A., Peherstorfer, B., Pflüger, D. & Song, Z. Sparse Grid Classifiers as Base Learners for AdaBoost. In International Conference on High Performance Computing and Simulation (HPCS), pages 161166, IEEE, 2012. [BibTeX]@inproceedings{heinecke12sparse,
title = {Sparse Grid Classifiers as Base Learners for AdaBoost},
author = {Heinecke, A. and Peherstorfer, B. and Pflüger, D. and Song, Z.},
pages = {161--166},
year = {2012},
booktitle = {International Conference on High Performance Computing and Simulation (HPCS)},
publisher = {IEEE},
} 
[13] 
Peherstorfer, B. & Bungartz, H.J. SemiCoarsening in Space and Time for the Hierarchical Transformation Multigrid Method. In International Conference on Computational Science, Volume 9 of Procedia Computer Science, pages 20002003, Elsevier, 2012. [BibTeX]@inproceedings{peherstorfer12semicoarsening,
title = {Semi-Coarsening in Space and Time for the Hierarchical Transformation Multigrid Method},
author = {Peherstorfer, B. and Bungartz, H.-J.},
volume = {9},
pages = {2000--2003},
year = {2012},
series = {Procedia Computer Science},
booktitle = {International Conference on Computational Science},
publisher = {Elsevier},
} 
[14] 
Peherstorfer, B., Pflüger, D. & Bungartz, H.J. A SparseGridBased OutofSample Extension for Dimensionality Reduction and Clustering with Laplacian Eigenmaps. In AI 2011: Advances in Artificial Intelligence, Volume 7106 of Lecture Notes in Computer Science, pages 112121, Springer, 2011. [BibTeX]@inproceedings{peherstorfer11sparsegridbased,
title = {A Sparse-Grid-Based Out-of-Sample Extension for Dimensionality Reduction and Clustering with Laplacian Eigenmaps},
author = {Peherstorfer, B. and Pflüger, D. and Bungartz, H.-J.},
volume = {7106},
pages = {112--121},
year = {2011},
series = {Lecture Notes in Computer Science},
booktitle = {AI 2011: Advances in Artificial Intelligence},
publisher = {Springer},
} 


PhD thesis
[1] 
Peherstorfer, B. Model Order Reduction of Parametrized Systems with Sparse Grid Learning Techniques. Technische Universität München, Munich, Germany, 2013. [BibTeX]@phdthesis{PeherstoPhD,
title = {Model Order Reduction of Parametrized Systems with Sparse Grid Learning Techniques},
author = {Peherstorfer, B.},
year = {2013},
school = {Technische Universität München},
address = {Munich, Germany},
} 


Talks
[1] 
Peherstorfer, B. Multifidelity methods and context-aware model reduction for Monte Carlo estimation and beyond. In Seminar Numerische Mathematik, Technical University Berlin, Berlin, Germany, 2018. 
[2] 
Peherstorfer, B. A Multifidelity CrossEntropy Method for Rare Event Simulation. In SIAM Uncertainty Quantification 2018, Garden Grove, CA, 2018. 
[3] 
Peherstorfer, B. Data-Driven Multifidelity Methods for Monte Carlo Estimation. In Model Reduction of Parametrized Systems (MoRePaS) IV, Nantes, France, 2018. 
[4] 
Peherstorfer, B. Multifidelity Monte Carlo estimation with adaptive lowfidelity models. In Reducing dimensions and cost for UQ in complex systems, Isaac Newton Institute for Mathematical Sciences, Cambridge, UK, 2018. 
[5] 
Peherstorfer, B. Data-Driven Multifidelity Methods for Monte Carlo Estimation. In Engineering Physics Seminars and Colloquium, University of Wisconsin-Madison, Madison, USA, 2018. 
[6] 
Peherstorfer, B. Multifidelity Monte Carlo estimation for large-scale uncertainty propagation. In 2018 AIAA Non-Deterministic Approaches Conference (AIAA SciTech), Kissimmee, USA, 2018. 
[7] 
Peherstorfer, B. Multifidelity methods for rare event simulation. In European Numerical Mathematics and Advanced Applications (ENUMATH), Bergen, Norway, 2017. 
[8] 
Peherstorfer, B. Online adaptive discrete empirical interpolation for nonlinear model reduction. In European Numerical Mathematics and Advanced Applications (ENUMATH), Bergen, Norway, 2017. 
[9] 
Peherstorfer, B. Multifidelity methods for uncertainty propagation and rare event simulation. In QUIET 2017  Quantification of Uncertainty: Improving Efficiency and Technology SISSA, Trieste, Italy, 2017. 
[10] 
Peherstorfer, B. Optimal lowrank updates for online adaptive model reduction with the discrete empirical interpolation method. In Householder Symposium XX on Numerical Linear Algebra, Blacksburg, USA, 2017. 
[11] 
Peherstorfer, B. Multifidelity Monte Carlo Methods for Rare Event Simulation. In MATRIX Workshop on Inverse Problems, Melbourne, Australia, 2017. 
[12] 
Peherstorfer, B. Datadriven reduced model construction with the timedomain Loewner framework and operator inference. In Colloquium, Department of Mathematics, Virginia Tech, Blacksburg, USA, 2017. 
[13] 
Peherstorfer, B. Multifidelity Methods for Uncertainty Propagation and Rare Event Simulation. In Workshop on DataDriven Modeling and Uncertainty Quantification (UQPM), Austin, USA, 2017. 
[14] 
Peherstorfer, B. Multifidelity Monte Carlo Methods with OptimallyAdapted Surrogate Models. In SIAM Computational Science and Engineering 2017, Atlanta, USA, 2017. 
[15] 
Peherstorfer, B. Optimal sampling in multifidelity Monte Carlo estimation for efficient uncertainty propagation. In SILO Seminar Wisconsin Institute for Discovery, Madison, USA, 2017. 
[16] 
Peherstorfer, B. Optimal sampling in multifidelity Monte Carlo estimation for efficient uncertainty propagation. In Applied and Computational Mathematics Seminar, Department of Mathematics, University of Wisconsin-Madison, Madison, USA, 2016. 
[17] 
Peherstorfer, B. Safe and Efficient DataDriven Model Reduction for Critical Engineering Applications. In Next Generation Mobility Modeling and Simulation, Novi, USA, 2016. 
[18] 
Peherstorfer, B. DataDriven Methods for Nonintrusive Model Reduction. In SIAM Annual Meeting 2016, Boston, USA, 2016. 
[19] 
Peherstorfer, B. Multifidelity Methods for Uncertainty Quantification. In Workshop on Data to Decisions in Aerospace Engineering, Auckland, New Zealand, 2016. 
[20] 
Peherstorfer, B. Multifidelity Methods for Uncertainty Quantification. In SIAM Uncertainty Quantification 2016, Lausanne, Switzerland, 2016. 
[21] 
Peherstorfer, B. Multifidelity Monte Carlo estimation with multiple surrogate models. In Copper Mountain conference on iterative methods, Copper Mountain, USA, 2016. 
[22] 
Peherstorfer, B. Multifidelity methods for uncertainty quantification. In Third International Workshop on Model Reduction for Parametrized Systems (MoRePaS III) SISSA, Trieste, Italy, 2015. 
[23] 
Peherstorfer, B. Online adaptive model reduction with dynamic models and sparse sampling. In European Numerical Mathematics and Advanced Applications (ENUMATH) Middle East Technical University, Ankara, Turkey, 2015. 
[24] 
Peherstorfer, B. Multifidelity Monte Carlo. In 6th Workshop on High-Dimensional Approximation, University of Bonn, Bonn, Germany, 2015. 
[25] 
Peherstorfer, B. Detecting and Adapting to Parameter Changes for Reduced Models of Dynamic Data-driven Application Systems. In International Conference on Computational Science, Reykjavík University, Reykjavík, Iceland, 2015. 
[26] 
Peherstorfer, B. Online Adaptive Model Reduction. In SIAM Conference on Computational Science and Engineering 2015 SIAM, Salt Lake City, USA, 2015. 
[27] 
Peherstorfer, B. Nonlinear model reduction through online adaptivity and dynamic models. In Scientific Computing Colloquium TUM, Munich, Germany, 2014. 
[28] 
Peherstorfer, B. Online Adaptive Model Reduction for Nonlinear Systems. In SIAM MIT Chapter 2014 MIT, Boston, USA, 2014. 
[29] 
Peherstorfer, B. Sparse grid density estimation with data independent quantities. In Sparse Grids and Applications 2014 SimTech, Stuttgart, Germany, 2014. 
[30] 
Peherstorfer, B. Density Estimation with Adaptive Sparse Grids for Large Datasets. In SIAM Data Mining 2014 SIAM, Philadelphia, USA, 2014. 
[31] 
Peherstorfer, B. Density Estimation with Adaptive Sparse Grids. In SIAM Uncertainty Quantification 2014 SIAM, Savannah, USA, 2014. 
[32] 
Peherstorfer, B. Localized model order reduction with machine learning methods. In SIAM and MIT CCE series Center for Computational Engineering, MIT, MIT, Boston, USA, 2014. 
[33] 
Peherstorfer, B. Localized Discrete Empirical Interpolation Method. In ACDL Seminars Department of Aeronautics and Astronautics, MIT, Department of Aeronautics and Astronautics, MIT, Boston, USA, 2014. 
[34] 
Peherstorfer, B. Localized DEIM based on feature extraction. In Model Reduction and Approximation for Complex Systems 2013 Institut für Informatik, Technische Universität München, Centre International de Rencontres Mathematiques, Marseille, France, 2013. 
[35] 
Bungartz, H.J. & Peherstorfer, B. Tackling higher dimensionalities with sparse grids. In ACM/FEF 2013, San Diego, USA, 2013. 
[36] 
Peherstorfer, B. Density Estimation for Large Datasets with Sparse Grids. In SIAM Conference on Computational Science and Engineering Institut für Informatik, Technische Universität München, Boston, USA, 2013. 
[37] 
Peherstorfer, B. Dünne Gitter: Konstruktion und Anwendung optimaler Diskretisierungen. In NUMET 2013 Lehrstuhl für Strömungsmechanik (LSTM)Institut für Informatik, Technische Universität München, Lehrstuhl für Strömungsmechanik (LSTM), Universität ErlangenNürnberg, Germany, 2013. 
[38] 
Peherstorfer, B. Reduced Order Models with LDEIM for Parametrized PDEs with Nonlinear Terms. In Angewandte Analysis und Numerische Simulation Institut für Informatik, Technische Universität München, Universität Stuttgart, Germany, 2013. 
[39] 
Peherstorfer, B. Localized Discrete Empirical Interpolation Method. In Second International Workshop on Model Reduction for Parametrized Systems (MoRePaS II) Institut für Informatik, Technische Universität München, Schloss Reisensburg, Günzburg, Germany, 2012. 
[40] 
Peherstorfer, B. Clustering Based on Density Estimation with Sparse Grids. In KI 2012: Advances in Artificial Intelligence Institut für Informatik, Technische Universität München, Saarbrücken, Germany, 2012. 
[41] 
Peherstorfer, B. A SparseGridBased OutofSample Extension for Dimensionality Reduction and Clustering with Laplacian Eigenmaps. In SGA 2012 Institut für Informatik, Technische Universität München, Munich, Germany, 2012. 
[42] 
Peherstorfer, B. A multigrid method for PDEs on spatially adaptive sparse grids. In 28th GAMM-Seminar on Analysis and Numerical Methods in Higher Dimensions Institut für Informatik, Technische Universität München, Leipzig, Germany, 2012. 
[43] 
Peherstorfer, B. Clustering of TruckData with Sparse Grids. In Project Meeting BMBF SIMDATANL Institut für Informatik, Technische Universität München, Fraunhofer SCAI, Bonn, Germany, 2011. 
[44] 
Peherstorfer, B. A multigrid method for PDEs on spatially adaptive sparse grids. In 4th Workshop on High-Dimensional Approximation Fakultät für Informatik, Technische Universität München, Bonn, Germany, 2011. 
[45] 
Peherstorfer, B. Reduced Basis Methods and Sparse Grids. In HIM  Workshop on Sparse Grids and Applications Fakultät für Informatik, Technische Universität München, Bonn, Germany, 2011. 
[46] 
Peherstorfer, B. Hierarchical Transformation Multigrid. In Ferienakademie Fakultät für Informatik, Technische Universität München, Durnholz, Italy, 2010. 
[47] 
Peherstorfer, B. Introduction to Reduced Basis Methods. In Ferienakademie Fakultät für Informatik, Technische Universität München, Durnholz, Italy, 2010. 

