First push: simple setup to use OpenGL

It's all inspired, in copy/paste style, by: https://github.com/Nercury/rust-and-opengl-lessons
Lapin 2021-07-21 19:39:59 +02:00
commit cf64bec257
24 changed files with 3139 additions and 0 deletions

3
.gitignore vendored Normal file

@@ -0,0 +1,3 @@
.*.sw*
target
*.profraw

11
lib/config/Cargo.toml Normal file

@@ -0,0 +1,11 @@
[package]
name = "config"
version = "0.1.0"
authors = []
edition = "2018"
[dependencies]
failure = "0.1.3"
slab = "0.4"
metrohash = "1.0.6"
resources = { path = "../resources" }

76
lib/config/src/lib.rs Normal file

@@ -0,0 +1,76 @@
use std::rc::Rc;
use std::cell::RefCell;
pub struct Config {
shared: Rc<RefCell<shared::InnerConfig>>,
}
impl Config {
pub fn new(res: resources::Resource) -> Config {
Config {
shared: Rc::new(RefCell::new(shared::InnerConfig::new(res)))
}
}
pub fn pick<T>(&self, name: &str) -> Pick<T>
where T: Default
{
let (index, mut data) = self.shared.borrow_mut().pick(name)
.unwrap_or_else(|| panic!("config section {:?} is already in use", name));
let value = if data.is_none() {
T::default()
} else {
unimplemented!("parse value from data")
};
Pick {
value,
shared: self.shared.clone(),
}
}
}
pub struct Pick<T> {
value: T,
shared: Rc<RefCell<shared::InnerConfig>>,
}
mod shared {
use slab::Slab;
use resources::Resource;
use metrohash::MetroHashMap;
struct SlabData {
}
pub struct InnerConfig {
sections: Slab<SlabData>,
section_name_index: MetroHashMap<String, usize>,
res: resources::Resource,
}
impl InnerConfig {
pub fn new(res: Resource) -> InnerConfig {
InnerConfig {
sections: Slab::new(),
section_name_index: MetroHashMap::default(),
res
}
}
pub fn pick(&mut self, section_name: &str) -> Option<(usize, Option<String>)> {
let existing_section = self.section_name_index.get(section_name).map(|v| *v);
match existing_section {
Some(_) => None,
None => {
let index = self.sections.insert(SlabData {});
self.section_name_index.insert(section_name.to_string(), index);
Some((index, None)) // TODO: load and return section config
}
}
}
}
}
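A minimal usage sketch of the `Config` API above (not part of this commit). It assumes a binary that depends on both the `config` and `resources` crates, with the resources crate's `backend_in_memory` feature enabled; the "settings" resource name and the `f32` section type are made up for illustration:

use resources::{backend, Resources};

fn main() {
    // a Resource proxy backed by an in-memory backend, just to feed Config::new
    let res = Resources::new()
        .loaded_from("memory", 0, backend::InMemory::new().with("settings", b""));
    let config = config::Config::new(res.resource("settings"));
    // reserves the "window" section; the section data is not loaded yet (see the
    // TODO in InnerConfig::pick), so this falls back to `f32::default()`
    let _window: config::Pick<f32> = config.pick("window");
}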

11
lib/gl/Cargo.toml Normal file

@@ -0,0 +1,11 @@
[package]
name = "gl"
version = "0.1.0"
authors = ["Nerijus Arlauskas <nercury@gmail.com>"]
[build-dependencies]
gl_generator = "0.9"
gl_generator_profiling_struct = "0.1"
[features]
debug = []

25
lib/gl/build.rs Normal file

@@ -0,0 +1,25 @@
extern crate gl_generator;
extern crate gl_generator_profiling_struct;
use gl_generator::{Api, Fallbacks, Profile, Registry};
use gl_generator_profiling_struct::ProfilingStructGenerator;
use std::env;
use std::fs::File;
use std::path::Path;
fn main() {
let out_dir = env::var("OUT_DIR").unwrap();
let mut file_gl = File::create(&Path::new(&out_dir).join("bindings.rs")).unwrap();
let registry = Registry::new(
Api::Gl,
(4, 5),
Profile::Core,
Fallbacks::All,
["GL_NV_command_list"],
);
registry
.write_bindings(ProfilingStructGenerator, &mut file_gl)
.unwrap();
}

33
lib/gl/src/lib.rs Normal file

@@ -0,0 +1,33 @@
mod bindings {
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
}
use std::ops::Deref;
use std::rc::Rc;
pub use crate::bindings::Gl as InnerGl;
pub use crate::bindings::*;
#[derive(Clone)]
pub struct Gl {
inner: Rc<bindings::Gl>,
}
impl Gl {
pub fn load_with<F>(loadfn: F) -> Gl
where
F: FnMut(&'static str) -> *const types::GLvoid,
{
Gl {
inner: Rc::new(bindings::Gl::load_with(loadfn)),
}
}
}
impl Deref for Gl {
type Target = bindings::Gl;
fn deref(&self) -> &bindings::Gl {
&self.inner
}
}
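A sketch of how this wrapper is intended to be loaded and shared (not part of the commit). It assumes an SDL2 video subsystem whose window already has a current GL context, mirroring the code that is still commented out in `lib/winput` further down:

// `video` is an sdl2::VideoSubsystem with a current GL context.
fn load_gl(video: &sdl2::VideoSubsystem) -> gl::Gl {
    gl::Gl::load_with(|s| video.gl_get_proc_address(s) as *const std::os::raw::c_void)
}

// Cloning only bumps the inner Rc; the loaded function pointer table is shared.
fn pass_gl_around(gl: &gl::Gl) -> gl::Gl {
    gl.clone()
}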

23
lib/resources/Cargo.toml Normal file

@@ -0,0 +1,23 @@
[package]
name = "resources"
version = "0.1.0"
authors = ["Nerijus Arlauskas <nercury@gmail.com>"]
[features]
default = []
backend_in_memory = []
backend_miniz = ["miniz_oxide"]
backend_filesystem = []
backend_filesystem_watch = ["backend_filesystem", "notify"]
[dependencies]
failure = "0.1.3"
slab = "0.4"
twox-hash = "1.1"
log = "0.4.6"
miniz_oxide = { version = "0.1", optional = true }
notify = { version = "4.0.0", optional = true }
[dev-dependencies]
miniz_oxide = "0.1"

217
lib/resources/src/backend/filesystem.rs Normal file

@@ -0,0 +1,217 @@
use crate::backend::{Backend, BackendSyncPoint, Modification};
use std::path::{Path, PathBuf};
use std::{fs, io};
use std::collections::VecDeque;
use crate::{Error, ResourcePath};
use std::sync::Mutex;
#[cfg(feature = "backend_filesystem_watch")]
mod watch_impl {
use std::collections::VecDeque;
use std::path::{Path, PathBuf};
use std::sync::mpsc::{channel, Receiver, TryRecvError};
use std::time::{Duration, Instant};
use notify::{RecommendedWatcher, Watcher as NotifyWatcher, RecursiveMode, DebouncedEvent};
use crate::backend::{BackendSyncPoint, Modification};
use crate::{ResourcePathBuf};
pub struct Watcher {
root_path: PathBuf,
_watcher: RecommendedWatcher,
receiver: Receiver<DebouncedEvent>,
outdated_at: Option<Instant>,
}
impl Watcher {
pub fn new(root_path: &Path) -> Option<Watcher> {
let (tx, rx) = channel();
let mut watcher: RecommendedWatcher = NotifyWatcher::new(tx, Duration::from_millis(50))
.map_err(|e| error!("failed to create watcher for {:?}, {:?}", root_path, e))
.ok()?;
watcher.watch(root_path, RecursiveMode::Recursive).ok()?;
Some(Watcher {
root_path: root_path.into(),
_watcher: watcher,
receiver: rx,
outdated_at: None,
})
}
pub fn notify_changes_synced(&mut self, point: BackendSyncPoint) {
if let Some(last_outdated) = self.outdated_at {
if point.instant == last_outdated {
self.outdated_at = None;
}
}
}
pub fn new_changes(&mut self, queue: &mut VecDeque<Modification>) -> Option<BackendSyncPoint> {
let mut something_outdated = false;
loop {
match self.receiver.try_recv() {
Ok(event) => {
match event {
DebouncedEvent::Create(path) => {
if let Some(resource_path) = ResourcePathBuf::from_filesystem_path(&self.root_path, &path) {
queue.push_back(Modification::Create(resource_path));
something_outdated = true;
} else {
warn!("unrecognised resource path {:?} for {} event", path, "Create")
}
},
DebouncedEvent::Write(path) => {
if let Some(resource_path) = ResourcePathBuf::from_filesystem_path(&self.root_path, &path) {
queue.push_back(Modification::Write(resource_path));
something_outdated = true;
} else {
warn!("unrecognised resource path {:?} for {} event", path, "Write")
}
},
DebouncedEvent::Chmod(path) => {
if let Some(resource_path) = ResourcePathBuf::from_filesystem_path(&self.root_path, &path) {
queue.push_back(Modification::Write(resource_path));
something_outdated = true;
} else {
warn!("unrecognised resource path {:?} for {} event", path, "Write")
}
},
DebouncedEvent::Remove(path) => {
if let Some(resource_path) = ResourcePathBuf::from_filesystem_path(&self.root_path, &path) {
queue.push_back(Modification::Remove(resource_path));
something_outdated = true;
} else {
warn!("unrecognised resource path {:?} for {} event", path, "Remove")
}
},
DebouncedEvent::Rename(from_path, to_path) => {
match (ResourcePathBuf::from_filesystem_path(&self.root_path, &from_path), ResourcePathBuf::from_filesystem_path(&self.root_path, &to_path)) {
(Some(from), Some(to)) => {
queue.push_back(Modification::Rename { from, to });
something_outdated = true;
},
(None, Some(_)) => warn!("unrecognised resource path {:?} for {} event", from_path, "Rename"),
(Some(_), None) => warn!("unrecognised resource path {:?} for {} event", to_path, "Rename"),
(None, None) => warn!("unrecognised resource paths {:?} and {:?} for Rename event", from_path, to_path),
}
},
_ => (),
}
},
Err(TryRecvError::Empty) => break,
Err(TryRecvError::Disconnected) => {
error!("filesystem watcher disconnected");
break;
},
}
}
if something_outdated {
let outdated_at = Instant::now();
self.outdated_at = Some(outdated_at);
Some(BackendSyncPoint { instant: outdated_at })
} else {
None
}
}
}
}
#[cfg(not(feature = "backend_filesystem_watch"))]
mod watch_impl {
use std::collections::VecDeque;
use crate::backend::{BackendSyncPoint, Modification};
pub struct Watcher {}
impl Watcher {
pub fn notify_changes_synced(&mut self, _point: BackendSyncPoint) {}
pub fn new_changes(&mut self, _queue: &mut VecDeque<Modification>) -> Option<BackendSyncPoint> {
None
}
}
}
pub struct FileSystem {
root_path: PathBuf,
can_write: bool,
watch: Option<Mutex<watch_impl::Watcher>>,
}
impl FileSystem {
pub fn from_rel_path<P: AsRef<Path>, RP: AsRef<ResourcePath>>(
root_path: P,
rel_path: RP,
) -> FileSystem {
FileSystem::from_path(resource_name_to_path(root_path.as_ref(), rel_path.as_ref()))
}
pub fn from_path<P: AsRef<Path>>(root_path: P) -> FileSystem {
FileSystem {
root_path: root_path.as_ref().into(),
can_write: false,
watch: None,
}
}
pub fn with_write(mut self) -> Self {
self.can_write = true;
self
}
#[cfg(feature = "backend_filesystem_watch")]
pub fn with_watch(mut self) -> Self {
self.watch = watch_impl::Watcher::new(&self.root_path).map(|v| Mutex::new(v));
self
}
}
impl Backend for FileSystem {
fn can_write(&self) -> bool {
self.can_write
}
fn exists(&self, path: &ResourcePath) -> bool {
resource_name_to_path(&self.root_path, path).exists()
}
fn notify_changes_synced(&mut self, point: BackendSyncPoint) {
if let Some(ref mut watch) = self.watch {
watch.lock().unwrap().notify_changes_synced(point);
}
}
fn new_changes(&mut self, queue: &mut VecDeque<Modification>) -> Option<BackendSyncPoint> {
if let Some(ref mut watch) = self.watch {
watch.lock().unwrap().new_changes(queue)
} else {
None
}
}
fn read_into(&mut self, path: &ResourcePath, mut output: &mut io::Write) -> Result<(), Error> {
let path = resource_name_to_path(&self.root_path, path);
let mut reader = io::BufReader::new(fs::File::open(path)?);
io::copy(&mut reader, &mut output)?;
Ok(())
}
fn write_from(&mut self, _path: &ResourcePath, _buffer: &mut io::Read) -> Result<(), Error> {
unimplemented!()
}
}
fn resource_name_to_path(root_dir: &Path, location: &ResourcePath) -> PathBuf {
let mut path: PathBuf = root_dir.into();
for part in location.items() {
path = path.join(part);
}
path
}
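A sketch of how this backend is typically constructed (not part of the commit; the "assets" directory next to the executable is an assumption). `with_watch` additionally requires the `backend_filesystem_watch` feature:

use resources::backend::FileSystem;

fn assets_backend() -> FileSystem {
    // resolve a directory next to the executable; the relative part is a
    // resource path and goes through `resource_name_to_path`
    let exe_dir = std::env::current_exe()
        .expect("current_exe")
        .parent()
        .expect("executable has a parent directory")
        .to_path_buf();
    FileSystem::from_rel_path(exe_dir, "assets")
        .with_write()
        .with_watch()
}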

106
lib/resources/src/backend/in_memory.rs Normal file

@@ -0,0 +1,106 @@
use crate::backend::{Backend, BackendSyncPoint, Modification};
use std::collections::{HashMap, VecDeque};
use std::hash::BuildHasherDefault;
use std::io;
use std::sync::{Arc, RwLock};
use twox_hash::XxHash;
use crate::{Error, ResourcePath, ResourcePathBuf};
#[derive(Debug)]
struct Shared {
map: HashMap<ResourcePathBuf, Vec<u8>, BuildHasherDefault<XxHash>>,
unsynced_change_time: Option<BackendSyncPoint>,
}
impl Shared {
pub fn new() -> Shared {
Shared {
map: HashMap::default(),
unsynced_change_time: None,
}
}
pub fn insert(&mut self, key: &ResourcePath, value: &[u8]) {
self.map.insert(key.as_ref().into(), value.into());
}
}
#[derive(Debug)]
pub struct InMemory {
shared: Arc<RwLock<Shared>>,
}
impl InMemory {
pub fn new() -> InMemory {
InMemory {
shared: Arc::new(RwLock::new(Shared::new())),
}
}
pub fn with<P: AsRef<ResourcePath>>(self, key: P, value: &[u8]) -> Self {
self.shared
.write()
.expect("failed to lock InMemory for write")
.insert(key.as_ref(), value);
self
}
}
impl Backend for InMemory {
fn can_write(&self) -> bool {
true
}
fn exists(&self, path: &ResourcePath) -> bool {
self.shared
.read()
.expect("failed to lock InMemory for read")
.map
.contains_key::<ResourcePath>(path.as_clean_str().as_ref())
}
fn notify_changes_synced(&mut self, point: BackendSyncPoint) {
let mut shared_ref = self
.shared
.write()
.expect("failed to lock InMemory for write");
if shared_ref.unsynced_change_time == Some(point) {
shared_ref.unsynced_change_time = None;
}
}
// matches the `Backend` trait; the in-memory backend never reports per-path modifications
fn new_changes(&mut self, _queue: &mut VecDeque<Modification>) -> Option<BackendSyncPoint> {
self.shared
.read()
.expect("failed to lock InMemory for read")
.unsynced_change_time
}
fn read_into(&mut self, path: &ResourcePath, output: &mut io::Write) -> Result<(), Error> {
let shared = self
.shared
.read()
.expect("failed to lock InMemory for read");
let item_ref = match shared.map.get(path) {
None => return Err(Error::NotFound),
Some(val) => val,
};
output.write_all(&item_ref)?;
Ok(())
}
fn write_from(&mut self, path: &ResourcePath, buffer: &mut io::Read) -> Result<(), Error> {
let mut data = Vec::new();
buffer.read_to_end(&mut data)?;
let mut shared = self
.shared
.write()
.expect("failed to lock InMemory for write");
shared.map.insert(path.into(), data);
shared.unsynced_change_time = Some(BackendSyncPoint::now());
Ok(())
}
}

90
lib/resources/src/backend/miniz.rs Normal file

@@ -0,0 +1,90 @@
extern crate miniz_oxide as miniz;
use crate::backend::{Backend, BackendSyncPoint, Modification};
use std::collections::VecDeque;
use failure;
use std::io;
use crate::{Error, ResourcePath, ResourcePathBuf};
#[derive(Debug)]
pub struct Miniz<T>
where
T: Backend,
{
inner: T,
level: u8,
}
impl<T> Miniz<T>
where
T: Backend,
{
pub fn new(inner: T, level: u8) -> Miniz<T> {
Miniz { inner, level }
}
}
impl<T> Backend for Miniz<T>
where
T: Backend,
{
fn can_write(&self) -> bool {
self.inner.can_write()
}
fn exists(&self, path: &ResourcePath) -> bool {
self.inner.exists(path)
}
fn notify_changes_synced(&mut self, point: BackendSyncPoint) {
self.inner.notify_changes_synced(point);
}
fn new_changes(&mut self, queue: &mut VecDeque<Modification>) -> Option<BackendSyncPoint> {
self.inner.new_changes(queue)
}
fn read_into(&mut self, path: &ResourcePath, output: &mut io::Write) -> Result<(), Error> {
let mut input_data = Vec::new();
self.inner.read_into(path, &mut input_data)?;
let output_data =
self::miniz::inflate::decompress_to_vec_zlib(&mut input_data).map_err(write_error)?;
output.write_all(&output_data[..])?;
Ok(())
}
fn write_from(&mut self, path: &ResourcePath, buffer: &mut io::Read) -> Result<(), Error> {
let mut input_data = Vec::new();
buffer.read_to_end(&mut input_data)?;
let output_data = self::miniz::deflate::compress_to_vec_zlib(&mut input_data, self.level);
let mut cursor = io::Cursor::new(output_data);
Ok(self.inner.write_from(path, &mut cursor)?)
}
}
#[derive(Fail, Debug)]
pub enum MinizError {
#[fail(display = "Miniz error {:?}", _0)]
ErrorCode(self::miniz::inflate::TINFLStatus),
}
fn write_error(miniz_error: self::miniz::inflate::TINFLStatus) -> Error {
Error::BackendFailedToWrite {
path: ResourcePathBuf::from(String::from("")),
inner: failure::Error::from(MinizError::ErrorCode(miniz_error)),
}
}
#[cfg(test)]
mod test {
use crate::backend::{Backend, InMemory, Miniz};
#[test]
fn test_can_write_and_read() {
let mut be = Miniz::new(InMemory::new(), 9);
be.write("x".into(), b"hello world").unwrap();
let result = be.read_vec("x".into()).unwrap();
assert_eq!(b"hello world", &result[..]);
}
}

62
lib/resources/src/backend/mod.rs Normal file

@@ -0,0 +1,62 @@
use crate::path::{ResourcePath, ResourcePathBuf};
use std::collections::VecDeque;
use std::io;
use std::time::Instant;
use crate::Error;
#[cfg(any(test, feature = "backend_in_memory"))]
mod in_memory;
#[cfg(any(test, feature = "backend_in_memory"))]
pub use self::in_memory::InMemory;
#[cfg(any(test, feature = "backend_miniz"))]
mod miniz;
#[cfg(any(test, feature = "backend_miniz"))]
pub use self::miniz::Miniz;
#[cfg(any(test, feature = "backend_filesystem"))]
mod filesystem;
#[cfg(any(test, feature = "backend_filesystem"))]
pub use self::filesystem::FileSystem;
#[derive(Eq, PartialEq, Copy, Clone, Debug)]
pub struct BackendSyncPoint {
pub(crate) instant: Instant,
}
#[derive(Eq, PartialEq, Clone, Debug)]
pub enum Modification {
Create(ResourcePathBuf),
Write(ResourcePathBuf),
Remove(ResourcePathBuf),
Rename { from: ResourcePathBuf, to: ResourcePathBuf },
}
impl BackendSyncPoint {
pub fn now() -> BackendSyncPoint {
BackendSyncPoint {
instant: Instant::now(),
}
}
}
pub trait Backend: Send + Sync {
fn can_write(&self) -> bool;
fn exists(&self, path: &ResourcePath) -> bool;
fn notify_changes_synced(&mut self, point: BackendSyncPoint);
fn new_changes(&mut self, queue: &mut VecDeque<Modification>) -> Option<BackendSyncPoint>;
fn read_into(&mut self, path: &ResourcePath, output: &mut io::Write) -> Result<(), Error>;
fn read_vec(&mut self, path: &ResourcePath) -> Result<Vec<u8>, Error> {
let mut output = Vec::new();
self.read_into(path, &mut output)?;
Ok(output)
}
fn write_from(&mut self, path: &ResourcePath, buffer: &mut io::Read) -> Result<(), Error>;
fn write(&mut self, path: &ResourcePath, mut value: &[u8]) -> Result<(), Error> {
self.write_from(path, &mut value)?;
Ok(())
}
}
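Only the streaming methods have to be implemented; `read_vec` and `write` are default helpers layered on top of `read_into` and `write_from`. A sketch of a custom read-only backend satisfying this contract (not part of the commit; `StaticHello` is made up):

use std::collections::VecDeque;
use std::io;
use resources::backend::{Backend, BackendSyncPoint, Modification};
use resources::{Error, ResourcePath};

struct StaticHello;

impl Backend for StaticHello {
    fn can_write(&self) -> bool { false }
    fn exists(&self, path: &ResourcePath) -> bool { path.as_clean_str() == "hello" }
    // nothing ever changes, so there is never a new sync point to acknowledge
    fn notify_changes_synced(&mut self, _point: BackendSyncPoint) {}
    fn new_changes(&mut self, _queue: &mut VecDeque<Modification>) -> Option<BackendSyncPoint> { None }
    fn read_into(&mut self, path: &ResourcePath, output: &mut io::Write) -> Result<(), Error> {
        if self.exists(path) {
            output.write_all(b"hello")?;
            Ok(())
        } else {
            Err(Error::NotFound)
        }
    }
    fn write_from(&mut self, _path: &ResourcePath, _buffer: &mut io::Read) -> Result<(), Error> {
        Err(Error::NotWritable)
    }
}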

36
lib/resources/src/error.rs Normal file

@@ -0,0 +1,36 @@
use failure;
use std::io;
use crate::ResourcePathBuf;
#[derive(Debug, Fail)]
pub enum Error {
#[fail(display = "I/O error")]
Io(#[cause] io::Error),
#[fail(display = "Item not found")]
NotFound,
#[fail(display = "Backend can not write")]
NotWritable,
#[fail(display = "Failed to write {}, {}", path, inner)]
BackendFailedToWrite {
path: ResourcePathBuf,
inner: failure::Error,
},
}
impl From<io::Error> for Error {
fn from(other: io::Error) -> Self {
Error::Io(other)
}
}
impl ::std::cmp::PartialEq for Error {
fn eq(&self, other: &Error) -> bool {
match (self, other) {
(Error::Io(_), Error::Io(_)) => true,
(Error::NotFound, Error::NotFound) => true,
(Error::NotWritable, Error::NotWritable) => true,
// `BackendFailedToWrite` carries a `failure::Error`, which has no `PartialEq`; such values are treated as unequal
// (comparing `a == b` here would recurse into this very impl).
_ => false,
}
}
}

372
lib/resources/src/lib.rs Normal file

@@ -0,0 +1,372 @@
#[macro_use]
extern crate failure;
extern crate slab;
extern crate twox_hash;
#[macro_use]
extern crate log;
#[cfg(feature = "backend_filesystem_watch")]
extern crate notify;
mod path;
pub use self::path::{ResourcePath, ResourcePathBuf};
mod shared;
use self::shared::{InternalSyncPoint, SharedResources, UserKey};
pub mod backend;
mod error;
pub use self::error::Error;
use std::sync::Arc;
use std::sync::RwLock;
use std::time::Instant;
pub struct SyncPoint(InternalSyncPoint);
#[derive(Clone)]
pub struct Resources {
shared: Arc<RwLock<SharedResources>>,
}
impl Resources {
pub fn new() -> Resources {
Resources {
shared: Arc::new(RwLock::new(SharedResources::new())),
}
}
pub fn loaded_from<L: backend::Backend + 'static>(
self,
loader_id: &str,
order: isize,
backend: L,
) -> Resources {
self.insert_loader(loader_id, order, backend);
self
}
pub fn insert_loader<L: backend::Backend + 'static>(
&self,
loader_id: &str,
order: isize,
backend: L,
) {
let mut resources = self.shared.write().expect("failed to lock for write");
resources.insert_loader(loader_id, order, backend);
}
pub fn remove_loader(&self, loader_id: &str) {
let mut resources = self.shared.write().expect("failed to lock for write");
resources.remove_loader(loader_id);
}
pub fn resource<P: AsRef<ResourcePath>>(&self, path: P) -> Resource {
Resource {
shared: self.shared.clone(),
key: self
.shared
.write()
.expect("failed to lock for write")
.new_resource_user(path),
}
}
pub fn new_changes(&self) -> Option<SyncPoint> {
self.shared
.write()
.expect("failed to lock for write")
.new_changes()
.map(|p| SyncPoint(p))
}
pub fn notify_changes_synced(&self, sync_point: SyncPoint) {
self.shared
.write()
.expect("failed to lock for write")
.notify_changes_synced(sync_point.0)
}
}
pub struct Resource {
shared: Arc<RwLock<SharedResources>>,
key: UserKey,
}
impl Resource {
pub fn name(&self) -> String {
let shared_ref = &self.shared;
let resources = shared_ref.read().expect("failed to lock for read");
resources
.get_resource_path(self.key)
.map(|p| p.to_string())
.expect("expected resource to have access to the name")
}
/// Check if this resource exists.
///
/// This check is unreliable: it only tells whether at least one backend can return the resource at this moment.
/// Note that the resource can be gone the very next moment.
pub fn exists(&self) -> bool {
let shared_ref = &self.shared;
let resources = shared_ref.read().expect("failed to lock for read");
resources
.get_resource_path_backend_containing_resource(self.key)
.map(|(path, _, b)| b.exists(path))
.unwrap_or(false)
}
/// Read the value from the backend that has the highest order number and contains the resource.
pub fn get(&self) -> Result<Vec<u8>, Error> {
let shared_ref = &self.shared;
let mut resources = shared_ref.write().expect("failed to lock for write");
let mut did_read = None;
{
for (path, modification_time, backend) in resources.resource_backends(self.key) {
match backend.read_vec(path) {
Ok(result) => {
did_read = Some((modification_time, result));
break;
}
Err(Error::NotFound) => continue,
Err(e) => return Err(e),
}
}
}
if let Some((modification_time, result)) = did_read {
resources.notify_did_read(self.key, modification_time);
return Ok(result);
}
Err(Error::NotFound)
}
/// Write the value to the backend that has the highest order number and can write.
pub fn write(&self, data: &[u8]) -> Result<(), Error> {
let shared_ref = &self.shared;
let mut resources = shared_ref.write().expect("failed to lock for write");
let mut did_write = false;
{
for (path, _, backend) in resources.resource_backends(self.key) {
match backend.write(path, data) {
Ok(()) => {
did_write = true;
break;
}
Err(Error::NotWritable) => continue,
Err(e) => return Err(e),
}
}
}
if did_write {
resources.notify_did_write(self.key, Instant::now());
return Ok(());
}
Err(Error::NotWritable)
}
pub fn is_modified(&self) -> bool {
let resources = self.shared.read().expect("failed to lock for read");
resources
.get_path_user_metadata(self.key)
.map(|m| m.outdated_at.is_some())
.unwrap_or(false)
}
}
impl Clone for Resource {
fn clone(&self) -> Self {
let new_key = {
let mut resources = self.shared.write().expect("failed to lock for write");
resources.append_resource_user(self.key.resource_id)
};
Resource {
shared: self.shared.clone(),
key: new_key,
}
}
}
impl Drop for Resource {
fn drop(&mut self) {
let mut resources = self.shared.write().expect("failed to lock for write");
resources.remove_resource_user(self.key);
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn with_no_loaders_should_have_no_reader() {
let res = Resources::new();
assert!(!res.resource("a").exists());
}
#[test]
fn should_read_value() {
let res =
Resources::new().loaded_from("a", 0, backend::InMemory::new().with("name", b"hello"));
assert_eq!(&res.resource("name").get().unwrap(), b"hello");
}
#[test]
fn there_should_be_no_changes_and_resources_should_not_be_modified_at_start() {
let res =
Resources::new().loaded_from("a", 0, backend::InMemory::new().with("name", b"hello"));
assert!(res.new_changes().is_none());
let resource_proxy_a = res.resource("name");
let resource_proxy_b = res.resource("name");
let resource_proxy_clone_a = resource_proxy_a.clone();
let resource_proxy_clone_b = resource_proxy_b.clone();
assert!(res.new_changes().is_none());
assert!(!resource_proxy_a.is_modified());
assert!(!resource_proxy_b.is_modified());
assert!(!resource_proxy_clone_a.is_modified());
assert!(!resource_proxy_clone_b.is_modified());
}
#[test]
fn writing_resource_should_produce_change_sync_point_and_other_resource_proxies_should_see_it_as_modified() {
let res =
Resources::new().loaded_from("a", 0, backend::InMemory::new().with("name", b"hello"));
let resource_proxy_a = res.resource("name");
let resource_proxy_b = res.resource("name");
let resource_proxy_clone_a = resource_proxy_a.clone();
let resource_proxy_clone_b = resource_proxy_b.clone();
assert!(resource_proxy_b.write(b"world").is_ok());
assert!(res.new_changes().is_some());
assert!(resource_proxy_a.is_modified());
assert!(
!resource_proxy_b.is_modified(),
"the most recent written item is assumed to be up to date"
);
assert!(resource_proxy_clone_a.is_modified());
assert!(resource_proxy_clone_b.is_modified());
}
#[test]
fn notifying_changes_synced_should_clear_sync_point() {
let res =
Resources::new().loaded_from("a", 0, backend::InMemory::new().with("name", b"hello"));
let resource_proxy_a = res.resource("name");
let resource_proxy_b = res.resource("name");
resource_proxy_b.write(b"world").unwrap();
assert!(res.new_changes().is_some());
let point = res.new_changes().unwrap();
res.notify_changes_synced(point);
assert!(
resource_proxy_a.is_modified(),
"resources remain marked as modified until read"
);
assert!(
!resource_proxy_b.is_modified(),
"last written resource looses modified state"
);
assert!(res.new_changes().is_none());
}
#[test]
fn notifying_changes_synced_should_not_clear_sync_point_if_there_were_new_writes() {
let res =
Resources::new().loaded_from("a", 0, backend::InMemory::new().with("name", b"hello"));
let resource_proxy_a = res.resource("name");
let resource_proxy_b = res.resource("name");
resource_proxy_b.write(b"world").unwrap();
assert!(res.new_changes().is_some());
let point = res.new_changes().unwrap();
resource_proxy_a.write(b"world2").unwrap();
res.notify_changes_synced(point);
assert!(
resource_proxy_b.is_modified(),
"resources remain marked as modified until read"
);
assert!(
!resource_proxy_a.is_modified(),
"last written resource looses modified state"
);
assert!(res.new_changes().is_some());
}
#[test]
fn removing_the_loader_should_invalidate_resource() {
let res =
Resources::new().loaded_from("a", 0, backend::InMemory::new().with("name", b"hello"));
let resource_proxy_a = res.resource("name");
res.remove_loader("a");
assert!(res.new_changes().is_some());
let point = res.new_changes().unwrap();
assert!(
resource_proxy_a.is_modified(),
"removed loader should trigger modified flag on resource"
);
res.notify_changes_synced(point);
assert!(res.new_changes().is_none());
}
#[test]
fn adding_the_loader_should_override_resource_and_invalidate_it() {
let res =
Resources::new().loaded_from("a", 0, backend::InMemory::new().with("name", b"hello"));
let resource_proxy_a = res.resource("name");
res.insert_loader("b", 1, backend::InMemory::new().with("name", b"world"));
assert!(res.new_changes().is_some());
let point = res.new_changes().unwrap();
assert!(
resource_proxy_a.is_modified(),
"adding loader should trigger modified flag on resource"
);
assert_eq!(&resource_proxy_a.get().unwrap(), b"world");
assert!(
!resource_proxy_a.is_modified(),
"reading resouce should mark it read"
);
res.notify_changes_synced(point);
assert!(res.new_changes().is_none());
}
}
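A sketch of the change-polling flow this API is built for (not part of the commit). It assumes the `backend_filesystem` and `backend_filesystem_watch` features and an "assets" directory containing "shaders/quad.vert"; paths and loader names are illustrative:

use resources::{backend, Resources};

fn main() {
    let res = Resources::new()
        .loaded_from("fs", 0, backend::FileSystem::from_path("assets").with_watch());
    let shader = res.resource("shaders/quad.vert");
    let mut source = shader.get().expect("initial load");

    // typical per-frame poll
    if let Some(point) = res.new_changes() {
        if shader.is_modified() {
            // re-reading clears this proxy's modified flag
            source = shader.get().expect("reload");
        }
        res.notify_changes_synced(point);
    }
    println!("{} bytes loaded for {}", source.len(), shader.name());
}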

849
lib/resources/src/path.rs Normal file

@@ -0,0 +1,849 @@
/*!
Resource path implementation.
A universal resource path helps to query resources the same way across different platforms and backends.
*/
#[derive(Clone, Hash, Eq, PartialEq, Ord, PartialOrd)]
pub struct ResourcePathBuf {
inner: String,
}
#[derive(Hash, Eq, PartialEq, Ord, PartialOrd)]
pub struct ResourcePath {
inner: str,
}
impl ResourcePath {
fn from_inner(inner: &str) -> &ResourcePath {
unsafe { &*(inner as *const str as *const ResourcePath) }
}
}
impl ::std::ops::Deref for ResourcePathBuf {
type Target = ResourcePath;
fn deref(&self) -> &ResourcePath {
&ResourcePath::from_inner(&self.inner[..])
}
}
impl AsRef<ResourcePath> for str {
fn as_ref(&self) -> &ResourcePath {
&ResourcePath::from_inner(self)
}
}
impl AsRef<ResourcePath> for String {
fn as_ref(&self) -> &ResourcePath {
&ResourcePath::from_inner(&self)
}
}
impl AsRef<ResourcePath> for ResourcePathBuf {
fn as_ref(&self) -> &ResourcePath {
&ResourcePath::from_inner(&self.inner)
}
}
impl<'a> From<&'a ResourcePath> for ResourcePathBuf {
fn from(other: &ResourcePath) -> Self {
ResourcePathBuf {
inner: other.inner.into(),
}
}
}
impl<'a> From<&'a str> for &'a ResourcePath {
fn from(other: &'a str) -> Self {
&ResourcePath::from_inner(other)
}
}
impl From<String> for ResourcePathBuf {
fn from(other: String) -> Self {
ResourcePathBuf { inner: other }
}
}
impl ::std::borrow::Borrow<ResourcePath> for ResourcePathBuf {
fn borrow(&self) -> &ResourcePath {
&ResourcePath::from_inner(&self.inner)
}
}
impl AsRef<ResourcePath> for ResourcePath {
fn as_ref(&self) -> &ResourcePath {
self
}
}
// ---- IMPL ----
impl ResourcePath {
pub fn parent(&self) -> Option<&ResourcePath> {
match self.inner.rfind('/') {
Some(index) => Some(ResourcePath::from_inner(&self.inner[..index])),
None => if &self.inner == "" {
None
} else {
Some(ResourcePath::from_inner(""))
},
}
}
pub fn to_string(&self) -> String {
self.inner.into()
}
pub fn items(&self) -> impl Iterator<Item = &str> {
self.inner.split('/')
}
/// Returns path as str and ensures that the returned str does not have a leading or trailing slash
pub fn as_clean_str(&self) -> &str {
let mut result = &self.inner;
if result.starts_with('/') {
result = &result[1..];
}
if result.ends_with('/') {
result = &result[..result.len() - 1];
}
result
}
pub fn join<P: AsRef<ResourcePath>>(&self, other: P) -> ResourcePathBuf {
let left = self.as_clean_str();
let right = other.as_ref().as_clean_str();
if left.is_empty() {
return ResourcePathBuf::from(right.as_ref());
}
if right.is_empty() {
return ResourcePathBuf::from(left.as_ref());
}
ResourcePathBuf {
inner: [left, "/", right].concat(),
}
}
pub fn to_filesystem_path(&self, root_dir: &::std::path::Path) -> ::std::path::PathBuf {
let mut path: ::std::path::PathBuf = root_dir.into();
for part in self.items() {
path = path.join(sanitize_path_component(part).as_ref());
}
path
}
}
impl ResourcePathBuf {
pub fn from_filesystem_path(root_dir: &::std::path::Path, path: &::std::path::Path) -> Option<Self> {
let relative_dir = path.strip_prefix(root_dir).ok()?;
let mut path = ResourcePathBuf { inner: String::with_capacity(relative_dir.as_os_str().len() + 32) };
for part in relative_dir.components() {
if let Some(unsanitized_part) = unsanitize_path_component(part.as_os_str()) {
path = path.join(unsanitized_part.as_ref());
} else {
return None;
}
}
Some(path)
}
}
// ---- Formatting ----
use std::fmt;
impl fmt::Debug for ResourcePath {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
f.debug_tuple("ResourcePath").field(&&self.inner).finish()
}
}
impl fmt::Display for ResourcePath {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
fmt::Display::fmt(&self.inner, f)
}
}
impl fmt::Debug for ResourcePathBuf {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
f.debug_tuple("ResourcePathBuf").field(&self.inner).finish()
}
}
impl fmt::Display for ResourcePathBuf {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
fmt::Display::fmt(&self.inner, f)
}
}
// ---- Other utils ---
use std::borrow::Cow;
struct FixSolution<'s> {
problematic_sequence_len: usize,
fix: FixOutput<'s>,
}
enum FixOutput<'s> {
/// Insert a single slice into the byte output
Single(&'s [u8]),
/// Insert 3 slices into the byte output
Triple(&'s [u8], &'s [u8], &'s [u8]),
}
/// Check if the subsequent string requires a fix
///
/// The fixes here should be reversible. It should be possible to reconstruct the
/// resource name from the sanitized output.
fn check_for_sanitize_fix(previous_len: usize, remainder: &[u8]) -> Option<FixSolution> {
let next_char = remainder[0];
if previous_len == 0 && remainder.len() >= 3 {
match next_char {
b'C' | b'c' => {
let c1 = remainder[1].to_ascii_lowercase();
let c2 = remainder[2].to_ascii_lowercase();
let c3 = remainder.iter().skip(3).next().cloned();
let c4 = remainder.iter().skip(4).next().cloned();
match (c1, c2, c3, c4) {
(b'o', b'n', None, None) => {
return Some(FixSolution {
problematic_sequence_len: 3,
fix: FixOutput::Triple(b"+r", remainder, b"+"),
})
}
(b'o', b'n', Some(b'.'), _) => {
return Some(FixSolution {
problematic_sequence_len: 3,
fix: FixOutput::Triple(b"+r", &remainder[..3], b"+"),
})
}
(b'o', b'm', Some(b'1'...b'9'), None) => {
return Some(FixSolution {
problematic_sequence_len: 4,
fix: FixOutput::Triple(b"+r", remainder, b"+"),
})
}
(b'o', b'm', Some(b'1'...b'9'), Some(b'.')) => {
return Some(FixSolution {
problematic_sequence_len: 4,
fix: FixOutput::Triple(b"+r", &remainder[..4], b"+"),
})
}
_ => (),
}
}
b'P' | b'p' => {
let c1 = remainder[1].to_ascii_lowercase();
let c2 = remainder[2].to_ascii_lowercase();
let c3 = remainder.iter().skip(3).next().cloned();
match (c1, c2, c3) {
(b'r', b'n', None) => {
return Some(FixSolution {
problematic_sequence_len: 3,
fix: FixOutput::Triple(b"+r", remainder, b"+"),
})
}
(b'r', b'n', Some(b'.')) => {
return Some(FixSolution {
problematic_sequence_len: 3,
fix: FixOutput::Triple(b"+r", &remainder[..3], b"+"),
})
}
_ => (),
}
}
b'A' | b'a' => {
let c1 = remainder[1].to_ascii_lowercase();
let c2 = remainder[2].to_ascii_lowercase();
let c3 = remainder.iter().skip(3).next().cloned();
match (c1, c2, c3) {
(b'u', b'x', None) => {
return Some(FixSolution {
problematic_sequence_len: 3,
fix: FixOutput::Triple(b"+r", remainder, b"+"),
})
}
(b'u', b'x', Some(b'.')) => {
return Some(FixSolution {
problematic_sequence_len: 3,
fix: FixOutput::Triple(b"+r", &remainder[..3], b"+"),
})
}
_ => (),
}
}
b'N' | b'n' => {
let c1 = remainder[1].to_ascii_lowercase();
let c2 = remainder[2].to_ascii_lowercase();
let c3 = remainder.iter().skip(3).next().cloned();
match (c1, c2, c3) {
(b'u', b'l', None) => {
return Some(FixSolution {
problematic_sequence_len: 3,
fix: FixOutput::Triple(b"+r", remainder, b"+"),
})
}
(b'u', b'l', Some(b'.')) => {
return Some(FixSolution {
problematic_sequence_len: 3,
fix: FixOutput::Triple(b"+r", &remainder[..3], b"+"),
})
}
_ => (),
}
}
b'L' | b'l' if remainder.len() >= 4 => {
let c1 = remainder[1].to_ascii_lowercase();
let c2 = remainder[2].to_ascii_lowercase();
let c3 = remainder[3];
let c4 = remainder.iter().skip(4).next().cloned();
match (c1, c2, c3, c4) {
(b'p', b't', b'1'...b'9', None) => {
return Some(FixSolution {
problematic_sequence_len: 4,
fix: FixOutput::Triple(b"+r", remainder, b"+"),
})
}
(b'p', b't', b'1'...b'9', Some(b'.')) => {
return Some(FixSolution {
problematic_sequence_len: 4,
fix: FixOutput::Triple(b"+r", &remainder[..4], b"+"),
})
}
_ => (),
}
}
_ => (),
}
}
match next_char {
b'\\' => Some(FixSolution {
problematic_sequence_len: 1,
fix: FixOutput::Single(b"+b+"),
}),
b'+' => Some(FixSolution {
problematic_sequence_len: 1,
fix: FixOutput::Single(b"++"),
}),
b'<' => Some(FixSolution {
problematic_sequence_len: 1,
fix: FixOutput::Single(b"+lt+"),
}),
b'>' => Some(FixSolution {
problematic_sequence_len: 1,
fix: FixOutput::Single(b"+gt+"),
}),
b':' => Some(FixSolution {
problematic_sequence_len: 1,
fix: FixOutput::Single(b"+c+"),
}),
b'\"' => Some(FixSolution {
problematic_sequence_len: 1,
fix: FixOutput::Single(b"+q+"),
}),
b'/' => Some(FixSolution {
problematic_sequence_len: 1,
fix: FixOutput::Single(b"+sl+"),
}),
b'|' => Some(FixSolution {
problematic_sequence_len: 1,
fix: FixOutput::Single(b"+p+"),
}),
b'?' => Some(FixSolution {
problematic_sequence_len: 1,
fix: FixOutput::Single(b"+m+"),
}),
b'*' => Some(FixSolution {
problematic_sequence_len: 1,
fix: FixOutput::Single(b"+a+"),
}),
i @ 1..=31 => Some(FixSolution {
problematic_sequence_len: 1,
fix: match i {
1 => FixOutput::Single(b"+i1+"),
2 => FixOutput::Single(b"+i2+"),
3 => FixOutput::Single(b"+i3+"),
4 => FixOutput::Single(b"+i4+"),
5 => FixOutput::Single(b"+i5+"),
6 => FixOutput::Single(b"+i6+"),
7 => FixOutput::Single(b"+i7+"),
8 => FixOutput::Single(b"+i8+"),
9 => FixOutput::Single(b"+i9+"),
10 => FixOutput::Single(b"+i10+"),
11 => FixOutput::Single(b"+i11+"),
12 => FixOutput::Single(b"+i12+"),
13 => FixOutput::Single(b"+i13+"),
14 => FixOutput::Single(b"+i14+"),
15 => FixOutput::Single(b"+i15+"),
16 => FixOutput::Single(b"+i16+"),
17 => FixOutput::Single(b"+i17+"),
18 => FixOutput::Single(b"+i18+"),
19 => FixOutput::Single(b"+i19+"),
20 => FixOutput::Single(b"+i20+"),
21 => FixOutput::Single(b"+i21+"),
22 => FixOutput::Single(b"+i22+"),
23 => FixOutput::Single(b"+i23+"),
24 => FixOutput::Single(b"+i24+"),
25 => FixOutput::Single(b"+i25+"),
26 => FixOutput::Single(b"+i26+"),
27 => FixOutput::Single(b"+i27+"),
28 => FixOutput::Single(b"+i28+"),
29 => FixOutput::Single(b"+i29+"),
30 => FixOutput::Single(b"+i30+"),
31 => FixOutput::Single(b"+i31+"),
_ => unreachable!("should be in range 1 - 31"),
},
}),
b'.' if remainder.len() == 1 => Some(FixSolution {
problematic_sequence_len: 1,
fix: FixOutput::Single(b"+d+"),
}),
b' ' if remainder.len() == 1 => Some(FixSolution {
problematic_sequence_len: 1,
fix: FixOutput::Single(b"+s+"),
}),
_ => None,
}
}
enum SanitizeState {
/// Nothing was encountered that would need fixing
Good { position: usize },
/// Something was fixed, and the buffer for fixes was allocated
Fixed { buffer: Vec<u8> },
}
/// Apply the fix based on the previous sanitization state and the fix output returned from `check_for_sanitize_fix`
fn apply_sanitize_fix(
problematic_sequence_len: usize,
replacement: FixOutput,
remainder: &mut &[u8],
state: SanitizeState,
all_bytes: &[u8],
) -> SanitizeState {
match state {
SanitizeState::Fixed { mut buffer } => {
match replacement {
FixOutput::Single(replacement) => buffer.extend_from_slice(replacement),
FixOutput::Triple(a, b, c) => {
buffer.extend_from_slice(a);
buffer.extend_from_slice(b);
buffer.extend_from_slice(c);
}
}
*remainder = &remainder[problematic_sequence_len..];
SanitizeState::Fixed { buffer }
}
SanitizeState::Good { position } => {
let mut buffer = Vec::with_capacity(1024);
buffer.extend_from_slice(&all_bytes[..position]);
match replacement {
FixOutput::Single(replacement) => buffer.extend_from_slice(replacement),
FixOutput::Triple(a, b, c) => {
buffer.extend_from_slice(a);
buffer.extend_from_slice(b);
buffer.extend_from_slice(c);
}
}
*remainder = &remainder[problematic_sequence_len..];
SanitizeState::Fixed { buffer }
}
}
}
/// Create a version of path string that is safe to use as filesystem path component,
/// provided that it is not empty.
pub fn sanitize_path_component(component: &str) -> Cow<str> {
let bytes = component.as_bytes();
let mut remainder = bytes;
let mut state = SanitizeState::Good { position: 0 };
'main: loop {
state = match state {
SanitizeState::Good { .. } => {
let mut index = 0;
loop {
if remainder.len() == 0 {
return Cow::from(component);
}
if let Some(s) = check_for_sanitize_fix(index, remainder) {
state = apply_sanitize_fix(
s.problematic_sequence_len,
s.fix,
&mut remainder,
SanitizeState::Good { position: index },
bytes,
);
continue 'main;
}
index += 1;
remainder = &remainder[1..];
}
}
SanitizeState::Fixed { mut buffer } => {
if remainder.len() == 0 {
return Cow::from(
String::from_utf8(buffer).expect("expected valid utf8 sequence"),
);
}
if let Some(s) = check_for_sanitize_fix(buffer.len(), remainder) {
apply_sanitize_fix(
s.problematic_sequence_len,
s.fix,
&mut remainder,
SanitizeState::Fixed { buffer },
bytes,
)
} else {
buffer.extend_from_slice(&remainder[..1]);
remainder = &remainder[1..];
SanitizeState::Fixed { buffer }
}
}
};
}
}
use std::ffi::OsStr;
pub fn unsanitize_path_component(component: &OsStr) -> Option<Cow<str>> {
#[derive(Copy, Clone)]
enum FixState {
Underscore,
Scan,
}
enum UnsanitizeState {
Fixed { bytes: Vec<u8>, state: FixState, position: usize },
ReuseSameString,
}
let part = component.to_string_lossy();
if part.len() == 0 {
return Some(part);
}
let state = {
let bytes = part.as_ref().as_bytes();
let bytes_len = bytes.len();
let mut position = 0;
loop {
if bytes[position] == b'+' {
let mut ok_data = Vec::with_capacity(bytes_len);
ok_data.extend(bytes.iter().take(position));
break UnsanitizeState::Fixed { bytes: ok_data, state: FixState::Underscore, position: position + 1 };
}
position += 1;
if position >= bytes_len {
break UnsanitizeState::ReuseSameString;
}
}
};
match state {
UnsanitizeState::ReuseSameString => return Some(part),
UnsanitizeState::Fixed { mut bytes, mut state, mut position } => {
let src_bytes = part.as_ref().as_bytes();
let src_bytes_len = src_bytes.len();
loop {
match state {
FixState::Underscore => {
let remaining_len = src_bytes_len - position;
if remaining_len == 0 {
return None;
}
let next_char = src_bytes[position];
if remaining_len > 0 && next_char == b'+' {
bytes.push(b'+');
position += 1;
state = FixState::Scan;
} else if remaining_len > 4 && next_char == b'r' && src_bytes[position + 4] == b'+' {
bytes.extend_from_slice(&src_bytes[position + 1..position + 4]);
position += 5;
state = FixState::Scan;
} else if remaining_len > 5 && next_char == b'r' && src_bytes[position + 5] == b'+' {
bytes.extend_from_slice(&src_bytes[position + 1..position + 5]);
position += 6;
state = FixState::Scan;
} else if remaining_len > 2 && next_char == b'i' {
let next_char2 = src_bytes[position + 1];
let next_char3 = src_bytes[position + 2];
match (next_char2, next_char3) {
(b'1', b'+') => bytes.push(1),
(b'2', b'+') => bytes.push(2),
(b'3', b'+') => bytes.push(3),
(b'4', b'+') => bytes.push(4),
(b'5', b'+') => bytes.push(5),
(b'6', b'+') => bytes.push(6),
(b'7', b'+') => bytes.push(7),
(b'8', b'+') => bytes.push(8),
(b'9', b'+') => bytes.push(9),
_ => if remaining_len > 3 {
let next_char4 = src_bytes[position + 3];
match (next_char2, next_char3, next_char4) {
(b'1', b'0', b'+') => bytes.push(10),
(b'1', b'1', b'+') => bytes.push(11),
(b'1', b'2', b'+') => bytes.push(12),
(b'1', b'3', b'+') => bytes.push(13),
(b'1', b'4', b'+') => bytes.push(14),
(b'1', b'5', b'+') => bytes.push(15),
(b'1', b'6', b'+') => bytes.push(16),
(b'1', b'7', b'+') => bytes.push(17),
(b'1', b'8', b'+') => bytes.push(18),
(b'1', b'9', b'+') => bytes.push(19),
(b'2', b'0', b'+') => bytes.push(20),
(b'2', b'1', b'+') => bytes.push(21),
(b'2', b'2', b'+') => bytes.push(22),
(b'2', b'3', b'+') => bytes.push(23),
(b'2', b'4', b'+') => bytes.push(24),
(b'2', b'5', b'+') => bytes.push(25),
(b'2', b'6', b'+') => bytes.push(26),
(b'2', b'7', b'+') => bytes.push(27),
(b'2', b'8', b'+') => bytes.push(28),
(b'2', b'9', b'+') => bytes.push(29),
(b'3', b'0', b'+') => bytes.push(30),
(b'3', b'1', b'+') => bytes.push(31),
_ => return None,
}
position += 1;
},
}
position += 3;
state = FixState::Scan;
} else if remaining_len > 1 {
let next_char2 = src_bytes[position + 1];
match (next_char, next_char2) {
(b'd', b'+') => bytes.push(b'.'),
(b'b', b'+') => bytes.push(b'\\'),
(b'c', b'+') => bytes.push(b':'),
(b'q', b'+') => bytes.push(b'\"'),
(b'p', b'+') => bytes.push(b'|'),
(b'm', b'+') => bytes.push(b'?'),
(b'a', b'+') => bytes.push(b'*'),
(b's', b'+') => bytes.push(b' '),
_ => if remaining_len > 2 {
let next_char3 = src_bytes[position + 2];
match (next_char, next_char2, next_char3) {
(b'l', b't', b'+') => bytes.push(b'<'),
(b'g', b't', b'+') => bytes.push(b'>'),
(b's', b'l', b'+') => bytes.push(b'/'),
_ => return None,
}
position += 1;
},
}
position += 2;
state = FixState::Scan;
} else { return None }
},
FixState::Scan => {
if position == src_bytes_len {
break;
}
let next_char = src_bytes[position];
if next_char == b'+' {
state = FixState::Underscore;
} else {
bytes.push(next_char);
}
position += 1;
}
}
}
Some(Cow::from(String::from_utf8(bytes).expect("bytes already undergone lossy conversion to utf8")))
}
}
}
#[cfg(test)]
mod normalize_path_tests {
use super::{sanitize_path_component, unsanitize_path_component};
use std::ffi::OsString;
fn check(sanitized: &str, unsanitized: &str) {
assert_eq!(sanitized, sanitize_path_component(unsanitized).as_ref());
assert_eq!(unsanitized, unsanitize_path_component(&OsString::from(sanitized)).expect("expected to unsanitize a sanitized component").as_ref());
}
#[test]
fn test_common() {
// this is not a valid path, but that is not a concern of this function
check("", "");
// + is the start of the escape sequence, so this escapes the escape sequence
check("++", "+");
check("++++", "++");
// kill path traversing
check("+d+", ".");
check(".+d+", "..");
// simple unsanitized names
check("hello world", "hello world");
check("hello-world", "hello-world");
check("hello_world", "hello_world");
// underscore handling
assert_eq!("quad+.vert", unsanitize_path_component(&OsString::from("quad+.vert")).as_ref());
}
#[test]
fn test_windows() {
check("+b+", "\\");
check("+b++b+", "\\\\");
check("+lt+", "<");
check("+lt++lt+", "<<");
check("+gt+", ">");
check("+gt++gt+", ">>");
check("+c+", ":");
check("+c++c+", "::");
check("+q+", "\"");
check("+q++q+", "\"\"");
check("+sl+", "/");
check("+sl++sl+", "//");
check("+p+", "|");
check("+p++p+", "||");
check("+m+", "?");
check("+m++m+", "??");
check("+a+", "*");
check("+a++a+", "**");
for i in 1u8..=31 {
let mut output = String::new();
output.push_str("+i");
output.push_str(&format!("{}", i));
output.push('+');
let mut input = String::new();
input.push(i as char);
check(&output, &input);
let mut output = String::new();
output.push_str("+i");
output.push_str(&format!("{}", i));
output.push('+');
output.push_str("+i");
output.push_str(&format!("{}", i));
output.push('+');
let mut input = String::new();
input.push(i as char);
input.push(i as char);
check(&output, &input);
}
check("hello+s+", "hello ");
check("hello+d+", "hello.");
check("hello +s+", "hello ");
check("hello.+d+", "hello..");
check(" hello +s+", " hello ");
check(".hello.+d+", ".hello..");
for reserved_name in &[
"CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7",
"COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9",
] {
let seq = format!("{}", &reserved_name);
check(
&format!("+r{}+", seq),
&seq
);
let seq = format!("{}", reserved_name.to_lowercase());
check(
&format!("+r{}+", seq),
&seq
);
let seq = format!("{}", title_case(reserved_name));
check(
&format!("+r{}+", seq),
&seq
);
let seq = format!("{}", &reserved_name);
let input = format!("{}.txt", &seq);
let output = format!("+r{}+.txt", &seq);
check(&output, &input);
let seq = format!("{}", &reserved_name);
let input = format!("{}.", &seq);
let output = format!("+r{}++d+", &seq);
check(&output, &input);
let seq = format!("{}", &reserved_name);
let input = format!("{}.a", &seq);
let output = format!("+r{}+.a", &seq);
check(&output, &input);
let seq = format!("{}", &reserved_name);
let input = format!("hi {} and bye", &seq);
let output = format!("hi {} and bye", &seq);
check(&output, &input);
}
}
fn title_case(value: &str) -> String {
value
.chars()
.enumerate()
.flat_map(|(i, c)| {
if i == 0 {
Box::new(c.to_uppercase()) as Box<Iterator<Item = char>>
} else {
Box::new(c.to_lowercase()) as Box<Iterator<Item = char>>
}
})
.collect()
}
}
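A small sketch of the path semantics defined above (not part of the commit): `join` drops empty sides and leading/trailing slashes, and `to_filesystem_path` sanitizes each item for the host filesystem:

use std::path::Path;
use resources::ResourcePath;

fn main() {
    let base: &ResourcePath = "shaders/".into();
    let full = base.join("/quad.vert");
    assert_eq!(full.to_string(), "shaders/quad.vert");
    assert_eq!(full.parent().map(|p| p.to_string()), Some("shaders".to_string()));
    // plain names pass through unchanged; reserved names and special
    // characters would be escaped by `sanitize_path_component`
    let on_disk = full.to_filesystem_path(Path::new("assets"));
    assert_eq!(on_disk, Path::new("assets").join("shaders").join("quad.vert"));
}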

371
lib/resources/src/shared/mod.rs Normal file

@@ -0,0 +1,371 @@
use crate::backend::{Backend, BackendSyncPoint, Modification};
use crate::path::{ResourcePath, ResourcePathBuf};
use slab::Slab;
use std::collections::{HashMap, BTreeMap, VecDeque};
use std::hash::BuildHasherDefault;
use std::time::Instant;
use twox_hash::XxHash;
mod resource_metadata;
use self::resource_metadata::{ResourceMetadata, ResourceUserMetadata};
#[derive(Clone, Debug, Eq, PartialEq)]
struct LoaderKey {
id: String,
order: isize,
}
impl Ord for LoaderKey {
fn cmp(&self, other: &LoaderKey) -> ::std::cmp::Ordering {
match self.order.cmp(&other.order) {
::std::cmp::Ordering::Equal => self.id.cmp(&other.id),
ordering => ordering,
}
}
}
impl PartialOrd for LoaderKey {
fn partial_cmp(&self, other: &LoaderKey) -> Option<::std::cmp::Ordering> {
Some(self.cmp(other))
}
}
#[derive(Copy, Clone)]
pub struct UserKey {
pub resource_id: usize,
user_id: usize,
}
#[derive(Eq, PartialEq, Copy, Clone)]
pub enum InternalSyncPoint {
Backend {
backend_hash: u64,
sync_point: BackendSyncPoint,
},
Everything {
time: Instant,
},
}
pub struct SharedResources {
resource_metadata: Slab<ResourceMetadata>,
path_resource_ids: HashMap<ResourcePathBuf, usize, BuildHasherDefault<XxHash>>,
backends: BTreeMap<LoaderKey, Box<Backend>>,
outdated_at: Option<Instant>,
modification_queue: VecDeque<Modification>,
}
fn backend_hash(id: &str) -> u64 {
use std::hash::Hasher;
let mut hasher = XxHash::with_seed(8745287);
hasher.write(id.as_bytes());
hasher.finish()
}
impl SharedResources {
pub fn new() -> SharedResources {
SharedResources {
resource_metadata: Slab::with_capacity(1024), // 1024 files is enough for everyone
path_resource_ids: HashMap::default(),
backends: BTreeMap::new(),
outdated_at: None,
modification_queue: VecDeque::new(),
}
}
pub fn new_changes(&mut self) -> Option<InternalSyncPoint> {
if let Some(instant) = self.outdated_at {
return Some(InternalSyncPoint::Everything { time: instant });
}
let mut new_change_point = None;
let mut mod_queue = ::std::mem::replace(&mut self.modification_queue, VecDeque::new());
for (key, backend) in self.backends.iter_mut() {
mod_queue.clear();
if let Some(sync_point) = backend.new_changes(&mut mod_queue) {
new_change_point = Some(InternalSyncPoint::Backend {
backend_hash: backend_hash(&key.id),
sync_point,
});
break;
}
}
if let Some(InternalSyncPoint::Backend { backend_hash: bh, sync_point }) = new_change_point {
let mut some_resource_is_modified = false;
while let Some(modification) = mod_queue.pop_front() {
match modification {
Modification::Create(p) => {
if let Some(resource_id) = self.path_resource_ids.get(&p) {
if let Some(ref mut meta) = self.resource_metadata.get_mut(*resource_id) {
meta.everyone_should_reload(sync_point.instant);
some_resource_is_modified = true;
}
}
},
Modification::Write(p) => {
if let Some(resource_id) = self.path_resource_ids.get(&p) {
if let Some(ref mut meta) = self.resource_metadata.get_mut(*resource_id) {
meta.everyone_should_reload(sync_point.instant);
some_resource_is_modified = true;
}
}
},
Modification::Remove(p) => {
if let Some(resource_id) = self.path_resource_ids.get(&p) {
if let Some(ref mut meta) = self.resource_metadata.get_mut(*resource_id) {
meta.everyone_should_reload(sync_point.instant);
some_resource_is_modified = true;
}
}
},
Modification::Rename { from, to } => {
if let (Some(resource_id), Some(resource_id_to)) = (self.path_resource_ids.get(&from), self.path_resource_ids.get(&to)) {
if let Some(ref mut meta) = self.resource_metadata.get_mut(*resource_id) {
meta.everyone_should_reload(sync_point.instant);
some_resource_is_modified = true;
}
if let Some(ref mut meta) = self.resource_metadata.get_mut(*resource_id_to) {
meta.everyone_should_reload(sync_point.instant);
some_resource_is_modified = true;
}
}
},
}
}
if !some_resource_is_modified {
for (key, backend) in self.backends.iter_mut() {
if backend_hash(&key.id) == bh {
backend.notify_changes_synced(sync_point);
break;
}
}
new_change_point = None;
}
}
::std::mem::replace(&mut self.modification_queue, mod_queue);
new_change_point
}
pub fn notify_changes_synced(&mut self, sync_point: InternalSyncPoint) {
match sync_point {
InternalSyncPoint::Everything { time } => if self.outdated_at == Some(time) {
self.outdated_at = None;
},
InternalSyncPoint::Backend {
backend_hash: bh,
sync_point: sp,
} => {
for (key, backend) in self.backends.iter_mut() {
if backend_hash(&key.id) == bh {
backend.notify_changes_synced(sp);
break;
}
}
}
}
}
pub fn new_resource_user<P: AsRef<ResourcePath>>(&mut self, path: P) -> UserKey {
let clean_path_str: &ResourcePath = path.as_ref().as_clean_str().into();
let maybe_id = self.path_resource_ids.get(clean_path_str).cloned();
match maybe_id {
Some(id) => self.append_resource_user(id),
None => {
let mut metadata = ResourceMetadata::new(clean_path_str);
let user_id = metadata.new_user();
let resource_id = self.resource_metadata.insert(metadata);
self.path_resource_ids
.insert(ResourcePathBuf::from(clean_path_str), resource_id);
UserKey {
resource_id,
user_id,
}
}
}
}
/// Appends a user to the resource; the resource id must exist.
pub fn append_resource_user(&mut self, resource_id: usize) -> UserKey {
UserKey {
resource_id,
user_id: self
.resource_metadata
.get_mut(resource_id)
.expect("expected resource_id to exist when appending new user")
.new_user(),
}
}
pub fn remove_resource_user(&mut self, key: UserKey) {
let has_users = {
if let Some(metadata) = self.resource_metadata.get_mut(key.resource_id) {
metadata.remove_user(key.user_id);
Some(metadata.has_users())
} else {
None
}
};
if let Some(false) = has_users {
let metadata = self.resource_metadata.remove(key.resource_id);
self.path_resource_ids.remove(&metadata.path);
}
}
pub fn get_path_user_metadata(&self, key: UserKey) -> Option<&ResourceUserMetadata> {
self.resource_metadata
.get(key.resource_id)
.and_then(|path_metadata| path_metadata.get_user_metadata(key.user_id))
}
fn get_path_user_metadata_mut(&mut self, key: UserKey) -> Option<&mut ResourceUserMetadata> {
self.resource_metadata
.get_mut(key.resource_id)
.and_then(|path_metadata| path_metadata.get_user_metadata_mut(key.user_id))
}
pub fn insert_loader<L: Backend + 'static>(
&mut self,
loader_id: &str,
order: isize,
backend: L,
) {
let outdated_at = Instant::now();
for (path, resource_id) in self.path_resource_ids.iter() {
if backend.exists(&path) {
if let Some(metadata) = self.resource_metadata.get_mut(*resource_id) {
metadata.everyone_should_reload(outdated_at);
}
}
}
self.backends.insert(
LoaderKey {
id: loader_id.into(),
order,
},
Box::new(backend) as Box<Backend>,
);
if self.path_resource_ids.len() > 0 {
self.outdated_at = Some(outdated_at);
}
}
pub fn remove_loader(&mut self, loader_id: &str) {
let outdated_at = Instant::now();
let remove_keys: Vec<_> = self
.backends
.keys()
.filter(|k| k.id == loader_id)
.map(|k| k.clone())
.collect();
for removed_key in remove_keys {
if let Some(removed_backend) = self.backends.remove(&removed_key) {
for (path, resource_id) in self.path_resource_ids.iter() {
if removed_backend.exists(&path) {
if let Some(metadata) = self.resource_metadata.get_mut(*resource_id) {
metadata.everyone_should_reload(outdated_at);
}
}
}
}
}
if self.path_resource_ids.len() > 0 {
self.outdated_at = Some(outdated_at);
}
}
pub fn resource_backends(
&mut self,
key: UserKey,
) -> impl Iterator<Item = (&ResourcePath, Option<Instant>, &mut Box<Backend>)> {
let path_with_modification_time =
self.resource_metadata.get(key.resource_id).and_then(|m| {
m.users
.get(key.user_id)
.map(|u| (m.path.as_ref(), u.outdated_at))
});
self.backends.iter_mut().rev().filter_map(move |(_, b)| {
path_with_modification_time.map(move |(path, instant)| (path, instant, b))
})
}
#[allow(dead_code)]
pub fn get_resource_path_backend(
&self,
backend_id: &str,
key: UserKey,
) -> Option<(&ResourcePath, Option<Instant>, &Box<Backend>)> {
let path_with_modification_time =
self.resource_metadata.get(key.resource_id).and_then(|m| {
m.users
.get(key.user_id)
.map(|u| (m.path.as_ref(), u.outdated_at))
});
if let (Some((path, modification_time)), Some((_, backend))) = (
path_with_modification_time,
self.backends
.iter()
.filter(|(k, _)| &k.id == backend_id)
.next(),
) {
return Some((path, modification_time, backend));
}
None
}
pub fn get_resource_path(&self, key: UserKey) -> Option<&ResourcePath> {
self.resource_metadata
.get(key.resource_id)
.map(|m| m.path.as_ref())
}
pub fn get_resource_path_backend_containing_resource(
&self,
key: UserKey,
) -> Option<(&ResourcePath, Option<Instant>, &Box<Backend>)> {
let path_with_modification_time =
self.resource_metadata.get(key.resource_id).and_then(|m| {
m.users
.get(key.user_id)
.map(|u| (m.path.as_ref(), u.outdated_at))
});
if let Some((path, modification_time)) = path_with_modification_time {
for backend in self.backends.values().rev() {
if backend.exists(path) {
return Some((path, modification_time, backend));
}
}
}
None
}
pub fn notify_did_read(&mut self, key: UserKey, modified_time: Option<Instant>) {
if let Some(metadata) = self.get_path_user_metadata_mut(key) {
if metadata.outdated_at == modified_time {
metadata.outdated_at = None;
}
}
}
pub fn notify_did_write(&mut self, key: UserKey, modified_time: Instant) {
if let Some(metadata) = self.resource_metadata.get_mut(key.resource_id) {
metadata.everyone_should_reload_except(key.user_id, modified_time)
}
}
}

69
lib/resources/src/shared/resource_metadata.rs Normal file

@@ -0,0 +1,69 @@
use slab::Slab;
use std::time::Instant;
use crate::{ResourcePath, ResourcePathBuf};
/// Information about the latest resource update.
///
/// If `outdated_at` is `None`, there are no pending updates; otherwise it holds the timestamp of the latest update.
pub struct ResourceUserMetadata {
pub outdated_at: Option<Instant>,
}
/// Shared information about the resource.
///
/// Each resource can be owned by multiple proxies (called `Resource`). In that case, every proxy
/// gets an identifier from the `users` slab, and can check for resource updates in
/// `ResourceUserMetadata`.
pub struct ResourceMetadata {
pub path: ResourcePathBuf,
pub users: Slab<ResourceUserMetadata>,
}
impl ResourceMetadata {
pub fn new(path: &ResourcePath) -> ResourceMetadata {
ResourceMetadata {
path: ResourcePathBuf::from(path),
users: Slab::with_capacity(2),
}
}
pub fn new_user(&mut self) -> usize {
self.users
.insert(ResourceUserMetadata { outdated_at: None })
}
pub fn remove_user(&mut self, id: usize) {
self.users.remove(id);
if self.users.len() > 8 && self.users.capacity() / self.users.len() > 2 {
self.users.shrink_to_fit()
}
}
pub fn get_user_metadata(&self, id: usize) -> Option<&ResourceUserMetadata> {
self.users.get(id)
}
pub fn get_user_metadata_mut(&mut self, id: usize) -> Option<&mut ResourceUserMetadata> {
self.users.get_mut(id)
}
pub fn has_users(&mut self) -> bool {
self.users.len() > 0
}
pub fn everyone_should_reload_except(&mut self, id: usize, outdated_at: Instant) {
for (user_id, user) in self.users.iter_mut() {
user.outdated_at = if user_id != id {
Some(outdated_at)
} else {
None
};
}
}
pub fn everyone_should_reload(&mut self, outdated_at: Instant) {
for (_, user) in self.users.iter_mut() {
user.outdated_at = Some(outdated_at);
}
}
}

12
lib/winput/Cargo.toml Normal file

@@ -0,0 +1,12 @@
[package]
name = "winput"
version = "0.1.0"
authors = []
edition = "2018"
[dependencies]
failure = "0.1.3"
[dependencies.sdl2]
version = "0.34.5"
features = ["bundled", "static-link"]

134
lib/winput/src/lib.rs Normal file

@@ -0,0 +1,134 @@
#[macro_use] extern crate failure;
#[derive(Fail, Debug)]
pub enum Error {
#[fail(display = "Failed to initialize windows: {}", _0)]
FailedToInitializeWindows(String),
#[fail(display = "Window height {} overflows", _0)]
HeightOverflows(u32),
#[fail(display = "Window width {} overflows", _0)]
WidthOverflows(u32),
#[fail(display = "Invalid window title")]
InvalidTitle,
#[fail(display = "Failed to create window: {}", _0)]
FailedToCreateWindow(String),
}
#[derive(Debug, Copy, Clone)]
pub struct WindowDimensions {
pub size: WindowSize,
pub hdpi_size: WindowSize,
pub high_dpi: bool,
}
#[derive(Debug, Copy, Clone)]
pub struct WindowSize {
pub width: i32,
pub height: i32,
}
#[derive(Debug, Clone)]
pub struct WindowSettings {
pub dimensions: WindowDimensions,
}
impl Default for WindowSettings {
fn default() -> Self {
WindowSettings {
dimensions: WindowDimensions {
size: WindowSize {
width: 960,
height: 600,
},
hdpi_size: WindowSize {
width: 960,
height: 600,
},
high_dpi: false,
}
}
}
}
pub struct Window {
window: sdl2::video::Window,
settings: WindowSettings,
}
pub struct Windows {
sdl: sdl2::Sdl,
video: sdl2::VideoSubsystem,
}
fn sdl_windows_err(error: String) -> Error {
Error::FailedToInitializeWindows(error)
}
fn sdl_window_err(error: sdl2::video::WindowBuildError) -> Error {
use sdl2::video::WindowBuildError;
match error {
WindowBuildError::HeightOverflows(s) => Error::HeightOverflows(s),
WindowBuildError::WidthOverflows(s) => Error::WidthOverflows(s),
WindowBuildError::InvalidTitle(_) => Error::InvalidTitle,
WindowBuildError::SdlError(s) => Error::FailedToCreateWindow(s),
}
}
impl Windows {
pub fn new() -> Result<Windows, Error> {
let sdl = sdl2::init().map_err(sdl_windows_err)?;
let video = sdl.video().map_err(sdl_windows_err)?;
Ok(Windows {
sdl,
video,
})
}
pub fn create(&self, mut settings: WindowSettings) -> Result<Window, Error> {
let gl_attr = self.video.gl_attr();
gl_attr.set_context_profile(sdl2::video::GLProfile::Core);
gl_attr.set_context_version(4, 1);
gl_attr.set_accelerated_visual(true);
gl_attr.set_double_buffer(true);
gl_attr.set_multisample_buffers(1);
gl_attr.set_multisample_samples(16);
let dims = &mut settings.dimensions;
let mut window = self.video
.window("Demo", dims.size.width as u32, dims.size.height as u32);
let builder = window
.opengl()
.resizable();
if dims.high_dpi {
builder.allow_highdpi();
}
let mut window = builder.build().map_err(sdl_window_err)?;
if dims.high_dpi {
let drawable_size = window.drawable_size();
dims.hdpi_size.width = drawable_size.0 as i32;
dims.hdpi_size.height = drawable_size.1 as i32;
} else {
dims.hdpi_size.width = dims.size.width;
dims.hdpi_size.height = dims.size.height;
}
        let _scale = dims.hdpi_size.width as f32 / dims.size.width as f32; // drawable/logical ratio, i.e. the DPI scale (unused for now)
        let _scale_modifier = 1.0;
// let _gl_context = window.gl_create_context().map_err(sdl_err)?;
// let gl = gl::Gl::load_with(|s| {
// video_subsystem.gl_get_proc_address(s) as *const std::os::raw::c_void
// });
Ok(Window {
window,
settings,
})
}
}
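For completeness, a minimal sketch of how a binary depending on winput might drive the API above. The function name is made up and nothing below is part of the commit; since the GL context creation is still commented out in `create`, the window opens without rendering anything.

// Illustrative sketch only.
fn open_default_window() -> Result<(), winput::Error> {
    let windows = winput::Windows::new()?;             // initializes SDL and its video subsystem
    let settings = winput::WindowSettings::default();  // 960x600, high_dpi = false
    let _window = windows.create(settings)?;           // resizable, OpenGL-capable window
    Ok(())
}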

299
test_opengl/Cargo.lock generated Normal file
View File

@ -0,0 +1,299 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "adler"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "aho-corasick"
version = "0.7.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
dependencies = [
"memchr",
]
[[package]]
name = "autocfg"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "bitflags"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
[[package]]
name = "cc"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e70cc2f62c6ce1868963827bd677764c62d07c3d9a3e1fb1177ee1a9ab199eb2"
[[package]]
name = "cfg-if"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "cmake"
version = "0.1.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb6210b637171dfba4cda12e579ac6dc73f5165ad56133e5d72ef3131f320855"
dependencies = [
"cc",
]
[[package]]
name = "crc32fast"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "encoding_rs"
version = "0.8.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80df024fbc5ac80f87dfef0d9f5209a252f2a497f7f42944cff24d8253cac065"
dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "filetime"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d34cfa13a63ae058bfa601fe9e313bbdb3746427c1459185464ce0fcf62e1e8"
dependencies = [
"cfg-if 1.0.0",
"libc",
"redox_syscall",
"winapi",
]
[[package]]
name = "flate2"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd3aec53de10fe96d7d8c565eb17f2c687bb5518a2ec453b5b1252964526abe0"
dependencies = [
"cfg-if 1.0.0",
"crc32fast",
"libc",
"miniz_oxide",
]
[[package]]
name = "gl"
version = "0.1.0"
dependencies = [
"gl_generator",
"gl_generator_profiling_struct",
]
[[package]]
name = "gl_generator"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a795170cbd85b5a7baa58d6d7525cae6a03e486859860c220f7ebbbdd379d0a"
dependencies = [
"khronos_api",
"log",
"xml-rs",
]
[[package]]
name = "gl_generator_profiling_struct"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f93e1b0666dae88dda1a2d6fe9fb12f17c0b5551d0621e3d753e2c42fc6c067"
dependencies = [
"gl_generator",
]
[[package]]
name = "khronos_api"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "037ab472c33f67b5fbd3e9163a2645319e5356fcd355efa6d4eb7fff4bbcb554"
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "lesson-06-gl-struct"
version = "0.1.0"
dependencies = [
"gl",
"sdl2",
]
[[package]]
name = "libc"
version = "0.2.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320cfe77175da3a483efed4bc0adc1968ca050b098ce4f2f1c13a56626128790"
[[package]]
name = "log"
version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "memchr"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b16bd47d9e329435e309c58469fe0791c2d0d1ba96ec0954152a5ae2b04387dc"
[[package]]
name = "miniz_oxide"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"
dependencies = [
"adler",
"autocfg",
]
[[package]]
name = "redox_syscall"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ab49abadf3f9e1c4bc499e8845e152ad87d2ad2d30371841171169e9d75feee"
dependencies = [
"bitflags",
]
[[package]]
name = "regex"
version = "1.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.6.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
[[package]]
name = "sdl2"
version = "0.34.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "deecbc3fa9460acff5a1e563e05cb5f31bba0aa0c214bb49a43db8159176d54b"
dependencies = [
"bitflags",
"lazy_static",
"libc",
"sdl2-sys",
]
[[package]]
name = "sdl2-sys"
version = "0.34.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41a29aa21f175b5a41a6e26da572d5e5d1ee5660d35f9f9d0913e8a802098f74"
dependencies = [
"cfg-if 0.1.10",
"cmake",
"flate2",
"libc",
"tar",
"unidiff",
"version-compare",
]
[[package]]
name = "tar"
version = "0.4.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d779dc6aeff029314570f666ec83f19df7280bb36ef338442cfa8c604021b80"
dependencies = [
"filetime",
"libc",
"xattr",
]
[[package]]
name = "unidiff"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8a62719acf1933bfdbeb73a657ecd9ecece70b405125267dd549e2e2edc232c"
dependencies = [
"encoding_rs",
"lazy_static",
"regex",
]
[[package]]
name = "version-compare"
version = "0.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d63556a25bae6ea31b52e640d7c41d1ab27faba4ccb600013837a3d0b3994ca1"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "xattr"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "244c3741f4240ef46274860397c7c74e50eb23624996930e484c16679633a54c"
dependencies = [
"libc",
]
[[package]]
name = "xml-rs"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c1cb601d29fe2c2ac60a2b2e5e293994d87a1f6fa9687a31a15270f909be9c2"
dependencies = [
"bitflags",
]

14
test_opengl/Cargo.toml Normal file
View File

@ -0,0 +1,14 @@
[package]
name = "lesson-06-gl-struct"
version = "0.1.0"
authors = ["Nerijus Arlauskas <nercury@gmail.com>"]
[dependencies]
gl = { path = "../lib/gl" }
[dependencies.sdl2]
version = "0.34.5"
features = ["bundled", "static-link"]
[features]
gl_debug = ["gl/debug"]

138
test_opengl/src/main.rs Normal file
View File

@ -0,0 +1,138 @@
extern crate gl;
extern crate sdl2;
pub mod render_gl;
fn main() {
let sdl = sdl2::init().unwrap();
let video_subsystem = sdl.video().unwrap();
let gl_attr = video_subsystem.gl_attr();
gl_attr.set_context_profile(sdl2::video::GLProfile::Core);
gl_attr.set_context_version(4, 1);
let window = video_subsystem
.window("Game", 900, 700)
.opengl()
.resizable()
.build()
.unwrap();
let _gl_context = window.gl_create_context().unwrap();
let gl = gl::Gl::load_with(|s| {
video_subsystem.gl_get_proc_address(s) as *const std::os::raw::c_void
});
// set up shader program
use std::ffi::CString;
let vert_shader = render_gl::Shader::from_vert_source(
&gl,
&CString::new(include_str!("triangle.vert")).unwrap(),
).unwrap();
let frag_shader = render_gl::Shader::from_frag_source(
&gl,
&CString::new(include_str!("triangle.frag")).unwrap(),
).unwrap();
let shader_program =
render_gl::Program::from_shaders(&gl, &[vert_shader, frag_shader]).unwrap();
// set up vertex buffer object
let vertices: Vec<f32> = vec![
// positions // colors
0.5, -0.5, 0.0, 1.0, 0.0, 0.0, // bottom right
-0.5, -0.5, 0.0, 0.0, 1.0, 0.0, // bottom left
0.0, 0.5, 0.0, 0.0, 0.0, 1.0, // top
];
let mut vbo: gl::types::GLuint = 0;
unsafe {
gl.GenBuffers(1, &mut vbo);
}
unsafe {
gl.BindBuffer(gl::ARRAY_BUFFER, vbo);
gl.BufferData(
gl::ARRAY_BUFFER, // target
(vertices.len() * std::mem::size_of::<f32>()) as gl::types::GLsizeiptr, // size of data in bytes
vertices.as_ptr() as *const gl::types::GLvoid, // pointer to data
gl::STATIC_DRAW, // usage
);
gl.BindBuffer(gl::ARRAY_BUFFER, 0);
}
// set up vertex array object
let mut vao: gl::types::GLuint = 0;
unsafe {
gl.GenVertexArrays(1, &mut vao);
}
unsafe {
gl.BindVertexArray(vao);
gl.BindBuffer(gl::ARRAY_BUFFER, vbo);
gl.EnableVertexAttribArray(0); // this is "layout (location = 0)" in vertex shader
gl.VertexAttribPointer(
0, // index of the generic vertex attribute ("layout (location = 0)")
3, // the number of components per generic vertex attribute
gl::FLOAT, // data type
gl::FALSE, // normalized (int-to-float conversion)
(6 * std::mem::size_of::<f32>()) as gl::types::GLint, // stride (byte offset between consecutive attributes)
std::ptr::null() // offset of the first component
);
        gl.EnableVertexAttribArray(1); // this is "layout (location = 1)" in vertex shader
        gl.VertexAttribPointer(
            1, // index of the generic vertex attribute ("layout (location = 1)")
3, // the number of components per generic vertex attribute
gl::FLOAT, // data type
gl::FALSE, // normalized (int-to-float conversion)
(6 * std::mem::size_of::<f32>()) as gl::types::GLint, // stride (byte offset between consecutive attributes)
(3 * std::mem::size_of::<f32>()) as *const gl::types::GLvoid // offset of the first component
);
gl.BindBuffer(gl::ARRAY_BUFFER, 0);
gl.BindVertexArray(0);
}
// set up shared state for window
unsafe {
gl.Viewport(0, 0, 900, 700);
gl.ClearColor(0.3, 0.3, 0.5, 1.0);
}
// main loop
let mut event_pump = sdl.event_pump().unwrap();
'main: loop {
for event in event_pump.poll_iter() {
match event {
sdl2::event::Event::Quit { .. } => break 'main,
_ => {}
}
}
unsafe {
gl.Clear(gl::COLOR_BUFFER_BIT);
}
// draw triangle
shader_program.set_used();
unsafe {
gl.BindVertexArray(vao);
gl.DrawArrays(
gl::TRIANGLES, // mode
0, // starting index in the enabled arrays
3, // number of indices to be rendered
);
}
window.gl_swap_window();
}
}

162
test_opengl/src/render_gl.rs Normal file
View File

@ -0,0 +1,162 @@
use gl;
use std;
use std::ffi::{CStr, CString};
pub struct Program {
gl: gl::Gl,
id: gl::types::GLuint,
}
impl Program {
pub fn from_shaders(gl: &gl::Gl, shaders: &[Shader]) -> Result<Program, String> {
let program_id = unsafe { gl.CreateProgram() };
for shader in shaders {
unsafe {
gl.AttachShader(program_id, shader.id());
}
}
unsafe {
gl.LinkProgram(program_id);
}
let mut success: gl::types::GLint = 1;
unsafe {
gl.GetProgramiv(program_id, gl::LINK_STATUS, &mut success);
}
if success == 0 {
let mut len: gl::types::GLint = 0;
unsafe {
gl.GetProgramiv(program_id, gl::INFO_LOG_LENGTH, &mut len);
}
let error = create_whitespace_cstring_with_len(len as usize);
unsafe {
gl.GetProgramInfoLog(
program_id,
len,
std::ptr::null_mut(),
error.as_ptr() as *mut gl::types::GLchar,
);
}
return Err(error.to_string_lossy().into_owned());
}
for shader in shaders {
unsafe {
gl.DetachShader(program_id, shader.id());
}
}
Ok(Program {
gl: gl.clone(),
id: program_id,
})
}
pub fn id(&self) -> gl::types::GLuint {
self.id
}
pub fn set_used(&self) {
unsafe {
self.gl.UseProgram(self.id);
}
}
}
impl Drop for Program {
fn drop(&mut self) {
unsafe {
self.gl.DeleteProgram(self.id);
}
}
}
pub struct Shader {
gl: gl::Gl,
id: gl::types::GLuint,
}
impl Shader {
pub fn from_source(
gl: &gl::Gl,
source: &CStr,
kind: gl::types::GLenum,
) -> Result<Shader, String> {
let id = shader_from_source(gl, source, kind)?;
Ok(Shader { gl: gl.clone(), id })
}
pub fn from_vert_source(gl: &gl::Gl, source: &CStr) -> Result<Shader, String> {
Shader::from_source(gl, source, gl::VERTEX_SHADER)
}
pub fn from_frag_source(gl: &gl::Gl, source: &CStr) -> Result<Shader, String> {
Shader::from_source(gl, source, gl::FRAGMENT_SHADER)
}
pub fn id(&self) -> gl::types::GLuint {
self.id
}
}
impl Drop for Shader {
fn drop(&mut self) {
unsafe {
self.gl.DeleteShader(self.id);
}
}
}
fn shader_from_source(
gl: &gl::Gl,
source: &CStr,
kind: gl::types::GLenum,
) -> Result<gl::types::GLuint, String> {
let id = unsafe { gl.CreateShader(kind) };
unsafe {
gl.ShaderSource(id, 1, &source.as_ptr(), std::ptr::null());
gl.CompileShader(id);
}
let mut success: gl::types::GLint = 1;
unsafe {
gl.GetShaderiv(id, gl::COMPILE_STATUS, &mut success);
}
if success == 0 {
let mut len: gl::types::GLint = 0;
unsafe {
gl.GetShaderiv(id, gl::INFO_LOG_LENGTH, &mut len);
}
let error = create_whitespace_cstring_with_len(len as usize);
unsafe {
gl.GetShaderInfoLog(
id,
len,
std::ptr::null_mut(),
error.as_ptr() as *mut gl::types::GLchar,
);
}
return Err(error.to_string_lossy().into_owned());
}
Ok(id)
}
fn create_whitespace_cstring_with_len(len: usize) -> CString {
// allocate buffer of correct size
let mut buffer: Vec<u8> = Vec::with_capacity(len + 1);
// fill it with len spaces
buffer.extend([b' '].iter().cycle().take(len));
// convert buffer to CString
unsafe { CString::from_vec_unchecked(buffer) }
}

12
test_opengl/src/triangle.frag Normal file
View File

@ -0,0 +1,12 @@
#version 330 core
in VS_OUTPUT {
vec3 Color;
} IN;
out vec4 Color;
void main()
{
Color = vec4(IN.Color, 1.0f);
}

14
test_opengl/src/triangle.vert Normal file
View File

@ -0,0 +1,14 @@
#version 330 core
layout (location = 0) in vec3 Position;
layout (location = 1) in vec3 Color;
out VS_OUTPUT {
vec3 Color;
} OUT;
void main()
{
gl_Position = vec4(Position, 1.0);
OUT.Color = Color;
}