/// This example demonstrates how to draw a 2D glyph using wgpu.
///
/// Please note that the character would look better with antialiasing,
/// but in order to keep the sample simple this was omitted.
use meshtext::{Glyph, MeshGenerator, MeshText};
use std::borrow::Cow;
use wgpu::util::DeviceExt;
use winit::{
    event::{Event, WindowEvent},
    event_loop::{ControlFlow, EventLoop},
    window::Window,
};

const SHADER: &str = r##"
@vertex
fn vs_main(@location(0) position: vec2<f32>) -> @builtin(position) vec4<f32> {
    let scale = vec3<f32>(2.0, 2.0, 2.0);
    return vec4<f32>(position.x * scale.x - 0.5, position.y * scale.y - 0.5, 0.0, 1.0);
}

@fragment
fn fs_main() -> @location(0) vec4<f32> {
    return vec4<f32>(0.0, 0.0, 0.0, 1.0);
}
"##;

async fn run(event_loop: EventLoop<()>, window: Window, vertex_data: &[u8], vertex_count: u32) {
    let size = window.inner_size();

    let instance = wgpu::Instance::new(wgpu::InstanceDescriptor {
        backends: wgpu::Backends::all(),
        dx12_shader_compiler: wgpu::Dx12Compiler::Fxc,
        flags: wgpu::InstanceFlags::default(),
        gles_minor_version: wgpu::Gles3MinorVersion::Automatic,
    });
    let surface = unsafe {
        instance
            .create_surface(&window)
            .expect("Failed to create surface.")
    };
    let adapter = instance
        .request_adapter(&wgpu::RequestAdapterOptions {
            power_preference: wgpu::PowerPreference::default(),
            force_fallback_adapter: false,
            compatible_surface: Some(&surface),
        })
        .await
        .expect("Failed to find an appropriate adapter");

    let (device, queue) = adapter
        .request_device(
            &wgpu::DeviceDescriptor {
                label: None,
                features: wgpu::Features::empty(),
                limits: wgpu::Limits::downlevel_webgl2_defaults()
                    .using_resolution(adapter.limits()),
            },
            None,
        )
        .await
        .expect("Failed to create device");

    let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
        label: None,
        source: wgpu::ShaderSource::Wgsl(Cow::Borrowed(SHADER)),
    });

    let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
        label: None,
        bind_group_layouts: &[],
        push_constant_ranges: &[],
    });

    let capabilities = surface.get_capabilities(&adapter);
    let swapchain_format = *capabilities
        .formats
        .first()
        .expect("Surface does not support any texture format.");

    let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
        label: Some("Vertex Buffer"),
        contents: vertex_data,
        usage: wgpu::BufferUsages::VERTEX,
    });

    let render_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
        label: None,
        layout: Some(&pipeline_layout),
        vertex: wgpu::VertexState {
            module: &shader,
            entry_point: "vs_main",
            buffers: &[wgpu::VertexBufferLayout {
                // Each vertex consists of two `f32` coordinates (x, y).
                array_stride: std::mem::size_of::<f32>() as wgpu::BufferAddress * 2,
                step_mode: wgpu::VertexStepMode::Vertex,
                attributes: &wgpu::vertex_attr_array![0 => Float32x2],
            }],
        },
        fragment: Some(wgpu::FragmentState {
            module: &shader,
            entry_point: "fs_main",
            targets: &[Some(swapchain_format.into())],
        }),
        primitive: wgpu::PrimitiveState {
            topology: wgpu::PrimitiveTopology::TriangleList,
            front_face: wgpu::FrontFace::Ccw,
            cull_mode: Some(wgpu::Face::Back),
            ..Default::default()
        },
        depth_stencil: None,
        multisample: wgpu::MultisampleState::default(),
        multiview: None,
    });

    let mut config = wgpu::SurfaceConfiguration {
        alpha_mode: wgpu::CompositeAlphaMode::Auto,
        format: swapchain_format,
        height: size.height,
        present_mode: wgpu::PresentMode::Fifo,
        usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
        view_formats: vec![],
        width: size.width,
    };
    surface.configure(&device, &config);

    event_loop.run(move |event, _, control_flow| {
        // Move these resources into the closure so they live as long as the event loop.
        let _ = (&instance, &adapter, &shader, &pipeline_layout);

        *control_flow = ControlFlow::Wait;
        match event {
            Event::WindowEvent {
                event: WindowEvent::Resized(size),
                ..
            } => {
                config.width = size.width;
                config.height = size.height;
                surface.configure(&device, &config);
                window.request_redraw();
            }
            Event::RedrawRequested(_) => {
                let frame = surface
                    .get_current_texture()
                    .expect("Failed to acquire next swap chain texture");
                let view = frame
                    .texture
                    .create_view(&wgpu::TextureViewDescriptor::default());
                let mut encoder =
                    device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });
                {
                    let mut rpass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
                        label: None,
                        color_attachments: &[Some(wgpu::RenderPassColorAttachment {
                            view: &view,
                            resolve_target: None,
                            ops: wgpu::Operations {
                                load: wgpu::LoadOp::Clear(wgpu::Color::WHITE),
                                store: wgpu::StoreOp::Store,
                            },
                        })],
                        depth_stencil_attachment: None,
                        occlusion_query_set: None,
                        timestamp_writes: None,
                    });
                    rpass.set_vertex_buffer(0, vertex_buffer.slice(..));
                    rpass.set_pipeline(&render_pipeline);
                    rpass.draw(0..vertex_count, 0..1);
                }

                queue.submit(Some(encoder.finish()));
                frame.present();
            }
            Event::WindowEvent {
                event: WindowEvent::CloseRequested,
                ..
            } => *control_flow = ControlFlow::Exit,
            _ => {}
        }
    });
}

fn get_vertices_for_a() -> Vec<f32> {
    let character = 'A';
    let font_data = include_bytes!("../assets/font/FiraMono-Regular.ttf");
    let mut generator = MeshGenerator::new(font_data);

    let result: MeshText = generator
        .generate_glyph_2d(character, None)
        .expect("Failed to generate glyph.");

    result.vertices
}

fn main() {
    let event_loop = EventLoop::new();
    let window = winit::window::Window::new(&event_loop).unwrap();
    window.set_inner_size(winit::dpi::LogicalSize::new(600, 600));

    let text_vertices = get_vertices_for_a();

    // Convert the `f32` vertex data into little-endian bytes for the GPU buffer.
    let mut raw_data: Vec<u8> = Vec::new();
    for vert in text_vertices.iter() {
        raw_data.extend_from_slice(vert.to_le_bytes().as_slice());
    }

    pollster::block_on(run(
        event_loop,
        window,
        raw_data.as_slice(),
        // Two coordinates per vertex.
        text_vertices.len() as u32 / 2,
    ));
}
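
// NOTE: A minimal sketch of the Cargo.toml dependencies this example is assumed
// to build against. The versions below are inferred from the APIs used above
// (e.g. `wgpu::StoreOp` and `occlusion_query_set` point at wgpu 0.18, the
// three-argument event-loop closure at winit 0.28) and may need adjusting:
//
// [dependencies]
// meshtext = "0.3"
// pollster = "0.3"
// wgpu = "0.18"
// winit = "0.28"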