@inbook{6b68c579539e4f89b24a5ff09a298eca,
  author    = {van Schuppen, Jan H.},
  title     = {Stochastic Control with Complete Observations on an Infinite Horizon},
  booktitle = {Control and System Theory of Discrete-Time Stochastic Systems},
  series    = {Communications and Control Engineering},
  publisher = {Springer},
  year      = {2021},
  pages     = {493--546},
  doi       = {10.1007/978-3-030-66952-2_13},
  language  = {English},
  keywords  = {Complete observations, Infinite horizon, Stochastic control},
  abstract  = {Optimal stochastic control problems with complete observations and on an infinite horizon are considered. Control theory for both the average cost and the discounted cost function is treated. The dynamic programming approach is formulated as a procedure to determine the value and the value function; from the value function, one can derive the optimal control law. Stochastic controllability is in general needed to prove that there exists a control law with a finite average cost in case of positive cost. Special cases treated in depth are: the case of a Gaussian stochastic control system and of a finite stochastic control system.},
}