stop eating CPU time when window is in background
commit 490f02f2fe
parent d6316aa718
@@ -24,6 +24,7 @@ use crate::framework::render_opengl::{GLContext, OpenGLRenderer};
 use crate::framework::ui::init_imgui;
 use crate::Game;
 use crate::GAME_SUSPENDED;
+use std::time::Duration;

 pub struct SDL2Backend {
     context: Sdl,
@@ -176,6 +177,7 @@ impl BackendEventLoop for SDL2EventLoop {
             {
                 let mutex = GAME_SUSPENDED.lock().unwrap();
                 if *mutex {
+                    std::thread::sleep(Duration::from_millis(10));
                     continue;
                 }
             }
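For context, below is a minimal, self-contained sketch of the pattern this commit applies. It is not the actual event loop from this repository: GAME_SUSPENDED mirrors the name in the diff, while the spawned "focus" thread and the frame counter are stand-ins for SDL window-focus events and real update/render work. Before the fix, the suspended branch executed `continue` in a tight loop and spun a full core; the 10 ms sleep caps idle wake-ups at roughly 100 per second.

use std::sync::Mutex;
use std::thread;
use std::time::Duration;

// Mirrors the flag referenced in the diff; true while the window
// is in the background.
static GAME_SUSPENDED: Mutex<bool> = Mutex::new(true);

fn main() {
    // Stand-in for SDL focus events: un-suspend after half a second.
    thread::spawn(|| {
        thread::sleep(Duration::from_millis(500));
        *GAME_SUSPENDED.lock().unwrap() = false;
    });

    let mut frames = 0u32;
    loop {
        {
            let suspended = GAME_SUSPENDED.lock().unwrap();
            if *suspended {
                // Without this sleep the loop busy-spins while
                // suspended, burning CPU time doing nothing.
                thread::sleep(Duration::from_millis(10));
                continue;
            }
        } // lock is dropped here, before any frame work

        // Stand-in for "update and render the game".
        frames += 1;
        if frames >= 3 {
            println!("rendered {frames} frames after resume");
            break; // keep the sketch terminating
        }
    }
}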