1#![allow(dead_code)]
15
16use crate::{
17 hardware::virtio::gpu,
18 memory::{self, phys_to_virt},
19};
20use core::sync::atomic::{AtomicBool, Ordering};
21use spin::Mutex;
22
/// Largest framebuffer width this subsystem is sized for (4K UHD).
const MAX_WIDTH: u32 = 3840;
/// Largest framebuffer height this subsystem is sized for (4K UHD).
const MAX_HEIGHT: u32 = 2160;
/// Capacity of the dirty-rectangle set; inserts beyond this merge into slot 0.
const MAX_DIRTY_RECTS: usize = 8;
/// Minimum scheduler ticks between non-forced presents (rate limiting in
/// `present_if_due`).
const PRESENT_MIN_TICKS: u64 = 1;
28
/// Which backend provided the active scanout buffer.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum FramebufferSource {
    /// Linear framebuffer handed over by the Limine bootloader.
    Limine,
    /// Framebuffer backed by a VirtIO GPU resource (uses a back buffer).
    VirtioGpu,
    /// No framebuffer has been initialized.
    None,
}
36
/// Channel layout of a packed pixel: per-channel bit masks plus the left
/// shift that positions an 8-bit channel value inside the pixel word.
#[derive(Clone, Copy, Debug)]
pub struct PixelFormat {
    /// Bit mask covering the red channel within the pixel word.
    pub red_mask: u32,
    /// Left shift applied to an 8-bit red value when packing.
    pub red_shift: u8,
    pub green_mask: u32,
    pub green_shift: u8,
    pub blue_mask: u32,
    pub blue_shift: u8,
    /// Total bits per pixel (both init paths in this module use 32).
    pub bits_per_pixel: u8,
}
48
49impl Default for PixelFormat {
50 fn default() -> Self {
52 Self {
53 red_mask: 0x00FF0000,
54 red_shift: 16,
55 green_mask: 0x0000FF00,
56 green_shift: 8,
57 blue_mask: 0x000000FF,
58 blue_shift: 0,
59 bits_per_pixel: 32,
60 }
61 }
62}
63
/// Geometry and location of the active framebuffer.
#[derive(Clone, Copy, Debug)]
pub struct FramebufferInfo {
    /// Physical address of the buffer.
    pub base: u64,
    /// Kernel-virtual address that pixel writes go through.
    pub base_virt: usize,
    pub width: u32,
    pub height: u32,
    /// Row pitch in bytes (offsets elsewhere are `y * stride + x * 4`).
    pub stride: u32,
    pub format: PixelFormat,
    pub source: FramebufferSource,
}
75
/// Diagnostic snapshot of present/dirty bookkeeping.
#[derive(Clone, Copy, Debug)]
pub struct FramebufferRenderStats {
    /// True when changes are queued but not yet presented.
    pub present_pending: bool,
    /// Number of dirty rectangles currently tracked (0..=MAX_DIRTY_RECTS).
    pub dirty_region_count: usize,
    /// Scheduler tick at which the last present went out.
    pub last_present_tick: u64,
}
82
// SAFETY: FramebufferInfo holds only plain integers and Copy enums (no
// pointers, no interior mutability), so sharing or sending it is sound.
// NOTE(review): these impls are redundant — the compiler auto-derives
// Send/Sync for this field set.
unsafe impl Send for FramebufferInfo {}
unsafe impl Sync for FramebufferInfo {}
85
/// Active framebuffer state: mode info plus optional back buffer and the
/// dirty-region / present bookkeeping. Lives inside the global `FRAMEBUFFER`
/// mutex.
pub struct Framebuffer {
    info: FramebufferInfo,
    /// Back buffer (kernel-virtual pointer); allocated only by the VirtIO
    /// GPU init path, `None` for Limine framebuffers.
    double_buffer: Option<*mut u8>,
    /// When true, pixel writes target `double_buffer` and reach the screen
    /// via `swap_buffers`.
    use_double_buffer: bool,
    /// Coalesced regions modified since the last present.
    dirty: DirtyRectSet,
    /// Set when content changed; cleared once a present goes out.
    present_pending: bool,
    /// Tick of the last present, used for rate limiting.
    last_present_tick: u64,
}

// SAFETY: the raw `double_buffer` pointer suppresses the auto traits, but
// the struct is only reachable through the global `Mutex`, which serializes
// every access to the pointed-to memory.
unsafe impl Send for Framebuffer {}
unsafe impl Sync for Framebuffer {}
98
/// Global framebuffer instance; `None` until one of the init paths runs.
static FRAMEBUFFER: Mutex<Option<Framebuffer>> = Mutex::new(None);
/// Lock-free "initialized" flag so `is_available` can avoid the mutex.
static FRAMEBUFFER_INITIALIZED: AtomicBool = AtomicBool::new(false);
101
/// Axis-aligned dirty rectangle with exclusive `x1`/`y1` edges. `valid`
/// distinguishes "empty" from a real region, since all-zero coordinates are
/// otherwise ambiguous.
#[derive(Clone, Copy)]
struct DirtyRect {
    valid: bool,
    x0: u32,
    y0: u32,
    /// Exclusive right edge (`x0 + width`).
    x1: u32,
    /// Exclusive bottom edge (`y0 + height`).
    y1: u32,
}

/// Fixed-capacity set of dirty rects. Overlapping entries are merged on
/// insert; when full, new regions are folded into slot 0.
#[derive(Clone, Copy)]
struct DirtyRectSet {
    /// Only the first `len` entries are meaningful.
    rects: [DirtyRect; MAX_DIRTY_RECTS],
    len: usize,
}
116
117impl DirtyRect {
118 const fn empty() -> Self {
120 Self {
121 valid: false,
122 x0: 0,
123 y0: 0,
124 x1: 0,
125 y1: 0,
126 }
127 }
128
129 fn include(&mut self, x: u32, y: u32, width: u32, height: u32) {
131 if width == 0 || height == 0 {
132 return;
133 }
134 let x1 = x.saturating_add(width);
135 let y1 = y.saturating_add(height);
136 if !self.valid {
137 self.valid = true;
138 self.x0 = x;
139 self.y0 = y;
140 self.x1 = x1;
141 self.y1 = y1;
142 } else {
143 self.x0 = self.x0.min(x);
144 self.y0 = self.y0.min(y);
145 self.x1 = self.x1.max(x1);
146 self.y1 = self.y1.max(y1);
147 }
148 }
149
150 fn take(&mut self) -> Option<(u32, u32, u32, u32)> {
152 if !self.valid {
153 return None;
154 }
155 let x = self.x0;
156 let y = self.y0;
157 let width = self.x1.saturating_sub(self.x0);
158 let height = self.y1.saturating_sub(self.y0);
159 *self = Self::empty();
160 Some((x, y, width, height))
161 }
162}
163
164impl DirtyRectSet {
165 const fn empty() -> Self {
166 Self {
167 rects: [DirtyRect::empty(); MAX_DIRTY_RECTS],
168 len: 0,
169 }
170 }
171
172 fn clear(&mut self) {
173 self.len = 0;
174 let mut i = 0;
175 while i < MAX_DIRTY_RECTS {
176 self.rects[i] = DirtyRect::empty();
177 i += 1;
178 }
179 }
180
181 fn include(&mut self, x: u32, y: u32, width: u32, height: u32) {
182 if width == 0 || height == 0 {
183 return;
184 }
185 let mut next = DirtyRect::empty();
186 next.include(x, y, width, height);
187
188 let mut idx = 0;
189 while idx < self.len {
190 let cur = self.rects[idx];
191 if !cur.valid {
192 idx += 1;
193 continue;
194 }
195 let overlaps =
196 next.x0 <= cur.x1 && next.x1 >= cur.x0 && next.y0 <= cur.y1 && next.y1 >= cur.y0;
197 if overlaps {
198 next.include(
199 cur.x0,
200 cur.y0,
201 cur.x1.saturating_sub(cur.x0),
202 cur.y1.saturating_sub(cur.y0),
203 );
204 self.rects[idx] = self.rects[self.len - 1];
205 self.rects[self.len - 1] = DirtyRect::empty();
206 self.len -= 1;
207 idx = 0;
208 continue;
209 }
210 idx += 1;
211 }
212
213 if self.len < MAX_DIRTY_RECTS {
214 self.rects[self.len] = next;
215 self.len += 1;
216 return;
217 }
218
219 self.rects[0].include(
220 next.x0,
221 next.y0,
222 next.x1.saturating_sub(next.x0),
223 next.y1.saturating_sub(next.y0),
224 );
225 }
226}
227
228impl Framebuffer {
    /// Mark the framebuffer contents as changed so the next
    /// `present_if_due` / `swap_buffers` call will push them to the display.
    fn request_present(&mut self) {
        self.present_pending = true;
    }

    /// Flush pending dirty regions to the display, rate-limited to one
    /// present per `PRESENT_MIN_TICKS` scheduler ticks unless `force` is set.
    ///
    /// No-op when double buffering is enabled — that mode flushes via
    /// `swap_buffers` instead. For non-VirtIO sources the pixels were already
    /// written to the visible framebuffer, so only the dirty bookkeeping is
    /// cleared. NOTE(review): the VirtIO path calls into the GPU driver while
    /// the caller still holds the FRAMEBUFFER lock (unlike `swap_buffers`,
    /// which drops it first) — confirm the GPU driver never re-enters this
    /// module.
    fn present_if_due(&mut self, force: bool) {
        if !self.present_pending {
            return;
        }
        if self.use_double_buffer {
            return;
        }
        let now = crate::process::scheduler::ticks();
        // Either consume the pending flag now, or leave it set for a later
        // call once enough ticks have elapsed.
        if force || now.saturating_sub(self.last_present_tick) >= PRESENT_MIN_TICKS {
            self.present_pending = false;
            self.last_present_tick = now;
        } else {
            return;
        }

        if self.info.source != FramebufferSource::VirtioGpu {
            self.dirty.clear();
            return;
        }

        // Push each tracked dirty rect from the linear buffer to the GPU.
        if let Some(gpu) = gpu::get_gpu() {
            let mut idx = 0;
            while idx < self.dirty.len {
                let rect = self.dirty.rects[idx];
                if rect.valid {
                    let _ = gpu.present_from_linear(
                        self.info.base_virt as *const u8,
                        self.info.stride,
                        rect.x0,
                        rect.y0,
                        rect.x1.saturating_sub(rect.x0),
                        rect.y1.saturating_sub(rect.y0),
                    );
                }
                idx += 1;
            }
        }
        self.dirty.clear();
    }
272
273 pub fn init_limine(
275 addr: u64,
276 width: u32,
277 height: u32,
278 stride: u32,
279 format: PixelFormat,
280 ) -> Result<(), &'static str> {
281 if addr == 0 || width == 0 || height == 0 {
282 return Err("Invalid framebuffer parameters");
283 }
284
285 let base_virt = addr as usize;
286
287 let info = FramebufferInfo {
288 base: addr,
289 base_virt,
290 width,
291 height,
292 stride,
293 format,
294 source: FramebufferSource::Limine,
295 };
296
297 let fb = Framebuffer {
298 info,
299 double_buffer: None,
300 use_double_buffer: false,
301 dirty: DirtyRectSet::empty(),
302 present_pending: false,
303 last_present_tick: 0,
304 };
305
306 *FRAMEBUFFER.lock() = Some(fb);
307 FRAMEBUFFER_INITIALIZED.store(true, Ordering::SeqCst);
308
309 log::info!(
310 "[FB] Limine framebuffer: {}x{} @ {}bpp, stride={}",
311 width,
312 height,
313 format.bits_per_pixel,
314 stride
315 );
316
317 Ok(())
318 }
319
    /// Adopt the already-initialized VirtIO GPU scanout as the framebuffer
    /// and allocate a physically-contiguous back buffer for double buffering.
    ///
    /// # Errors
    /// Fails when the GPU driver is not up, the reported mode has zero size,
    /// or the back-buffer allocation fails.
    pub fn init_virtio_gpu() -> Result<(), &'static str> {
        let gpu_info = gpu::get_framebuffer_info().ok_or("VirtIO GPU not initialized")?;

        // The GPU path always uses 32bpp XRGB-style packing.
        let format = PixelFormat {
            red_mask: 0x00FF0000,
            red_shift: 16,
            green_mask: 0x0000FF00,
            green_shift: 8,
            blue_mask: 0x000000FF,
            blue_shift: 0,
            bits_per_pixel: 32,
        };

        let info = FramebufferInfo {
            base: gpu_info.framebuffer_phys,
            base_virt: gpu_info.framebuffer_virt as usize,
            width: gpu_info.width,
            height: gpu_info.height,
            stride: gpu_info.stride,
            format,
            source: FramebufferSource::VirtioGpu,
        };

        // Back buffer sized stride * height bytes, rounded up to a
        // power-of-two page count because the allocator works in orders.
        let db_size = (info.stride as usize) * (info.height as usize);
        if db_size == 0 {
            return Err("Invalid VirtIO framebuffer size");
        }
        let db_pages = (db_size + 4095) / 4096;
        let db_order = db_pages.next_power_of_two().trailing_zeros() as u8;
        let db_frame = crate::sync::with_irqs_disabled(|token| {
            memory::allocate_phys_contiguous(token, db_order)
        })
        .map_err(|_| "Failed to allocate double buffer")?;
        let db_virt = phys_to_virt(db_frame.start_address.as_u64()) as *mut u8;
        // SAFETY: the allocation above covers at least `db_size` bytes and is
        // reachable through the phys_to_virt window; start it out all-black.
        unsafe {
            core::ptr::write_bytes(db_virt, 0, db_size);
        }

        let fb = Framebuffer {
            info,
            double_buffer: Some(db_virt),
            use_double_buffer: true,
            dirty: DirtyRectSet::empty(),
            present_pending: false,
            last_present_tick: 0,
        };

        // NOTE(review): re-initializing leaks any previously allocated back
        // buffer; fine if this runs once at boot — confirm.
        *FRAMEBUFFER.lock() = Some(fb);
        FRAMEBUFFER_INITIALIZED.store(true, Ordering::SeqCst);

        log::info!(
            "[FB] VirtIO GPU framebuffer: {}x{} @ {}bpp, stride={}",
            info.width,
            info.height,
            info.format.bits_per_pixel,
            info.stride
        );

        Ok(())
    }
384
385 pub fn info() -> Option<FramebufferInfo> {
387 FRAMEBUFFER.lock().as_ref().map(|fb| fb.info)
388 }
389
390 pub fn width() -> u32 {
392 FRAMEBUFFER
393 .lock()
394 .as_ref()
395 .map(|fb| fb.info.width)
396 .unwrap_or(0)
397 }
398
399 pub fn height() -> u32 {
401 FRAMEBUFFER
402 .lock()
403 .as_ref()
404 .map(|fb| fb.info.height)
405 .unwrap_or(0)
406 }
407
408 pub fn stride() -> u32 {
410 FRAMEBUFFER
411 .lock()
412 .as_ref()
413 .map(|fb| fb.info.stride)
414 .unwrap_or(0)
415 }
416
417 pub fn is_available() -> bool {
419 FRAMEBUFFER_INITIALIZED.load(Ordering::Relaxed)
420 }
421
422 pub fn source() -> FramebufferSource {
424 FRAMEBUFFER
425 .lock()
426 .as_ref()
427 .map(|fb| fb.info.source)
428 .unwrap_or(FramebufferSource::None)
429 }
430
431 pub fn render_stats() -> Option<FramebufferRenderStats> {
432 FRAMEBUFFER
433 .lock()
434 .as_ref()
435 .map(|fb| FramebufferRenderStats {
436 present_pending: fb.present_pending,
437 dirty_region_count: fb.dirty.len,
438 last_present_tick: fb.last_present_tick,
439 })
440 }
441
442 pub fn set_pixel(x: u32, y: u32, r: u8, g: u8, b: u8) {
444 {
445 let mut guard = FRAMEBUFFER.lock();
446 let fb = match guard.as_mut() {
447 Some(f) => f,
448 None => return,
449 };
450
451 if x >= fb.info.width || y >= fb.info.height {
452 return;
453 }
454
455 let pixel = ((r as u32) << fb.info.format.red_shift)
456 | ((g as u32) << fb.info.format.green_shift)
457 | ((b as u32) << fb.info.format.blue_shift);
458
459 let offset = if fb.use_double_buffer {
460 fb.double_buffer.unwrap_or(fb.info.base_virt as *mut u8)
461 } else {
462 fb.info.base_virt as *mut u8
463 };
464
465 unsafe {
466 let pixel_ptr = offset.add((y * fb.info.stride + x * 4) as usize);
467 core::ptr::write(pixel_ptr as *mut u32, pixel);
468 }
469
470 fb.dirty.include(x, y, 1, 1);
471 fb.request_present();
472 fb.present_if_due(false);
473 }
474 }
475
476 pub fn fill_rect(x: u32, y: u32, width: u32, height: u32, r: u8, g: u8, b: u8) {
478 if width == 0 || height == 0 {
479 return;
480 }
481
482 {
483 let mut guard = FRAMEBUFFER.lock();
484 let fb = match guard.as_mut() {
485 Some(f) => f,
486 None => return,
487 };
488
489 if x >= fb.info.width || y >= fb.info.height {
490 return;
491 }
492
493 let max_w = fb.info.width - x;
494 let max_h = fb.info.height - y;
495 let width = width.min(max_w);
496 let height = height.min(max_h);
497 if width == 0 || height == 0 {
498 return;
499 }
500
501 let pixel = ((r as u32) << fb.info.format.red_shift)
502 | ((g as u32) << fb.info.format.green_shift)
503 | ((b as u32) << fb.info.format.blue_shift);
504
505 let offset = if fb.use_double_buffer {
506 fb.double_buffer.unwrap_or(fb.info.base_virt as *mut u8)
507 } else {
508 fb.info.base_virt as *mut u8
509 };
510
511 let stride = fb.info.stride as usize;
512 for dy in 0..height as usize {
513 let row_ptr =
514 unsafe { offset.add((y as usize + dy) * stride + x as usize * 4) as *mut u32 };
515 unsafe {
516 core::slice::from_raw_parts_mut(row_ptr, width as usize).fill(pixel);
517 }
518 }
519
520 fb.dirty.include(x, y, width, height);
521 fb.request_present();
522 fb.present_if_due(false);
523 }
524 }
525
    /// Draw a 1-pixel-tall horizontal line of `length` pixels from `(x, y)`.
    pub fn draw_hline(x: u32, y: u32, length: u32, r: u8, g: u8, b: u8) {
        Self::fill_rect(x, y, length, 1, r, g, b);
    }

    /// Draw a 1-pixel-wide vertical line of `length` pixels from `(x, y)`.
    pub fn draw_vline(x: u32, y: u32, length: u32, r: u8, g: u8, b: u8) {
        Self::fill_rect(x, y, 1, length, r, g, b);
    }

    /// Fill the whole screen with black; no-op when no framebuffer is set.
    pub fn clear() {
        let info = Self::info();
        if let Some(info) = info {
            Self::fill_rect(0, 0, info.width, info.height, 0, 0, 0);
        }
    }
543
    /// Push all dirty regions from the back buffer to the display.
    ///
    /// No-op unless double buffering is active and something is dirty. The
    /// dirty metadata (and, for non-VirtIO sources, the row copies) is
    /// handled under the FRAMEBUFFER lock; the VirtIO GPU transfer runs
    /// *after* the lock is dropped using a snapshot of the regions, so the
    /// GPU driver is never entered while the framebuffer lock is held.
    pub fn swap_buffers() {
        // (src ptr, src stride, regions, count) captured for the
        // out-of-lock VirtIO present.
        let mut virtio_present = None;
        {
            let mut guard = FRAMEBUFFER.lock();
            let fb = match guard.as_mut() {
                Some(f) => f,
                None => return,
            };

            if !fb.use_double_buffer || fb.double_buffer.is_none() {
                return;
            }

            let db = fb.double_buffer.unwrap();
            if fb.dirty.len == 0 {
                return;
            }

            if fb.info.source == FramebufferSource::VirtioGpu {
                // Snapshot (x, y, w, h) of each tracked rect.
                let mut regions = [(0u32, 0u32, 0u32, 0u32); MAX_DIRTY_RECTS];
                let mut idx = 0;
                while idx < fb.dirty.len {
                    let rect = fb.dirty.rects[idx];
                    regions[idx] = (
                        rect.x0,
                        rect.y0,
                        rect.x1.saturating_sub(rect.x0),
                        rect.y1.saturating_sub(rect.y0),
                    );
                    idx += 1;
                }
                virtio_present = Some((db as *const u8, fb.info.stride, regions, fb.dirty.len));
            } else {
                // Linear source: copy each dirty row back->front at the same
                // byte offset (both buffers share the same stride).
                let dst = fb.info.base_virt as *mut u8;
                let stride = fb.info.stride as usize;
                let mut idx = 0;
                while idx < fb.dirty.len {
                    let rect = fb.dirty.rects[idx];
                    let x = rect.x0;
                    let y = rect.y0;
                    let width = rect.x1.saturating_sub(rect.x0);
                    let height = rect.y1.saturating_sub(rect.y0);
                    let row_bytes = width as usize * 4;
                    for row in 0..height as usize {
                        let row_y = y as usize + row;
                        let src_off = row_y * stride + x as usize * 4;
                        let dst_off = src_off;
                        // SAFETY: offsets are bounded by the dirty rects,
                        // which only ever cover clipped, on-screen pixels.
                        unsafe {
                            core::ptr::copy_nonoverlapping(
                                db.add(src_off),
                                dst.add(dst_off),
                                row_bytes,
                            );
                        }
                    }
                    idx += 1;
                }
            }
            fb.dirty.clear();
            fb.present_pending = false;
            fb.last_present_tick = crate::process::scheduler::ticks();
        }

        // Lock released: now safe to call into the GPU driver.
        if let Some((src, src_stride, regions, region_count)) = virtio_present {
            if let Some(gpu) = gpu::get_gpu() {
                let mut idx = 0;
                while idx < region_count {
                    let (px, py, pw, ph) = regions[idx];
                    let _ = gpu.present_from_linear(src, src_stride, px, py, pw, ph);
                    idx += 1;
                }
            }
        }
    }
619
620 pub fn set_double_buffering(enable: bool) {
622 let mut fb = FRAMEBUFFER.lock();
623 if let Some(ref mut f) = fb.as_mut() {
624 f.use_double_buffer = enable && f.double_buffer.is_some();
625 }
626 }
627}
628
/// Simple 8-bit-per-channel RGB color triple.
#[derive(Clone, Copy)]
pub struct RgbColor {
    pub r: u8,
    pub g: u8,
    pub b: u8,
}
636
637impl RgbColor {
638 pub const BLACK: Self = Self { r: 0, g: 0, b: 0 };
639 pub const WHITE: Self = Self {
640 r: 255,
641 g: 255,
642 b: 255,
643 };
644 pub const RED: Self = Self { r: 255, g: 0, b: 0 };
645 pub const GREEN: Self = Self { r: 0, g: 255, b: 0 };
646 pub const BLUE: Self = Self { r: 0, g: 0, b: 255 };
647 pub const CYAN: Self = Self {
648 r: 0,
649 g: 255,
650 b: 255,
651 };
652 pub const MAGENTA: Self = Self {
653 r: 255,
654 g: 0,
655 b: 255,
656 };
657 pub const YELLOW: Self = Self {
658 r: 255,
659 g: 255,
660 b: 0,
661 };
662}
663
664pub fn init() {
666 log::info!("[FB] Initializing framebuffer subsystem...");
667
668 if gpu::is_available() {
670 if let Err(e) = Framebuffer::init_virtio_gpu() {
671 log::warn!("[FB] VirtIO GPU init failed: {}", e);
672 } else {
673 log::info!("[FB] Using VirtIO GPU framebuffer");
674 return;
675 }
676 }
677
678 if Framebuffer::is_available() {
681 log::info!("[FB] Using Limine framebuffer");
682 } else {
683 log::warn!("[FB] No framebuffer available");
684 }
685}