feat: send user input to server
cs50victor committed Jan 28, 2024
1 parent 45875ec commit fe13e87
Showing 14 changed files with 2,967 additions and 285 deletions.
5 changes: 5 additions & 0 deletions crates/bevy_headless/src/utils.rs
@@ -59,6 +59,11 @@ impl CurrImage {
pub fn to_web_base64(&self) -> anyhow::Result<String> {
base64_browser_img(&self.img_buffer)
}

pub fn dimensions(&self) -> [u32; 2] {
let (w, h) = self.img_buffer.dimensions();
[w, h]
}
}

#[derive(Debug, Default, Resource, Event)]
3 changes: 1 addition & 2 deletions demo/app/page.tsx
@@ -4,8 +4,7 @@ export default function Page() {
return (
<div className="min-h-dvh prose w-full mx-auto">
<div className="prose flex flex-col items-center justify-center">
<h1 className="text-center">Curr Image from New Media</h1>
<WebSocketExample/>
<WebSocketExample display="canvas"/>
</div>
</div>
)
64 changes: 64 additions & 0 deletions demo/components/Canvas.tsx
@@ -0,0 +1,64 @@
"use client"

import { useEffect, useRef, useState } from "react";
import { ServerWSResponse } from "./WebSocketExample";

const validCommands : Record<string, string> = {
"W": "UP",
"A": "LEFT",
"S": "DOWN",
"D": "RIGHT",
"ARROWUP": "ZOOM-IN",
"ARROWDOWN": "ZOOM-OUT"
}

export const Canvas=({img_metadata, socket}:{img_metadata: ServerWSResponse, socket: WebSocket })=>{
const canvasRef = useRef<HTMLCanvasElement>(null);
const [cursorPosition, setCursorPosition] = useState<[number, number]>()
const aspectRatio = 16/9;

useEffect(() => {
if (img_metadata){
const ctx = canvasRef.current?.getContext('2d');
const img = new Image();
img.src = img_metadata.image;
[img.width, img.height] = img_metadata.dimension
img.onload = () => {
ctx?.drawImage(img, 0, 0)
}
}
}, [img_metadata]);

const handle_keyboard_input = (e: KeyboardEvent) =>{
const key = e.key.toUpperCase()
if(key in validCommands){
e.preventDefault();
socket.send(validCommands[key])
console.log("key down -> ", key)
}
}

useEffect(() => {
document.addEventListener("keydown", handle_keyboard_input)
return () => document.removeEventListener("keydown", handle_keyboard_input)
}, []);

return (
<>
<canvas
className="w-screen h-screen"
ref={canvasRef}
width={1000}
height={1000}
onPointerMove={(event) => {
const [x,y] = [event.clientX, event.clientY]
setCursorPosition([Math.round(x),Math.round(y)])
console.log(event.clientX, event.clientY)
}}
onPointerLeave={(_)=>{
setCursorPosition(undefined)
}}
/>
</>
)
}
46 changes: 21 additions & 25 deletions demo/components/WebSocketExample.tsx
@@ -2,17 +2,24 @@

import { useEffect, useState } from 'react';
import Image from 'next/image'
import { Canvas } from './Canvas';

type DisplayType = "image" | "canvas"

export interface ServerWSResponse {
image: string
dimension: [number, number]
}

export default function WebSocketExample({port = 8080, display="canvas"}:{port?:number, display:DisplayType}){
const [imgMetadata, setImageMetadata] = useState<ServerWSResponse>();
const [socket, setSocket] = useState<WebSocket>()

export default function WebSocketExample({port = 8080}:{port?:number}){
const [imgUrl, setImgUrl] = useState<string|null>(null);
const [socket, setSocket] = useState<WebSocket|null>(null)

useEffect(() => {
const socket = new WebSocket(`ws://localhost:${port}`)

setSocket(socket);

socket.onopen = () => {
setSocket(socket)
console.log('WebSocket Open');
socket.send("hello");
};
@@ -27,38 +34,27 @@ export default function WebSocketExample({port = 8080}:{port?:number}){

socket.onmessage = (event) => {
try {
const data = JSON.parse(event.data);
const data : ServerWSResponse = JSON.parse(event.data);
console.log("WebSocket Message:", data);
if (data.image) {
setImgUrl(data.image);
setImageMetadata(data)
}
} catch (e) {
console.error("Error parsing the WebSocket response:", e);
}
};

return () => {
socket.close();
socket?.close();
};
}, []);

return (
<div>
{imgUrl ? (
<div className="h-[80vh] w-[80vw] relative border">
<Image
className="rounded"
src={imgUrl}
alt="Streamed image"
objectFit="cover"
fill
/>
</div>
): (
<p>loading ...</p>
)}
{ (imgMetadata && socket) ? (
display === "canvas" ? <Canvas img_metadata={imgMetadata} socket={socket}/> : <Image src={imgMetadata.image} alt="Streamed image" objectFit="cover" fill/>
) : <p className='text-xl font-semibold'>NEW MEDIA | trying to connect to server...</p>
}
</div>
)
};

// {/* <img style={{border: "1px solid black"}} src={imgUrl} alt="Received from server" /> */}
1 change: 1 addition & 0 deletions demo/public/images/cursor.svg
Binary file removed demo/public/images/livekit-apple-touch.png
1 change: 0 additions & 1 deletion demo/public/images/livekit-meet-home.svg

This file was deleted.

Binary file removed demo/public/images/livekit-meet-open-graph.png
1 change: 0 additions & 1 deletion demo/public/images/livekit-safari-pinned-tab.svg

This file was deleted.

36 changes: 33 additions & 3 deletions examples/new_media/src/controls.rs
@@ -1,6 +1,11 @@
use bevy::ecs::{
system::Resource,
world::{FromWorld, World},
use bevy::{
ecs::{
query::With,
system::{Query, Res, Resource},
world::{FromWorld, World},
},
render::camera::Camera,
transform::components::Transform,
};

/// Allows LLM / Model to Control the Bevy World remotely
@@ -16,3 +21,28 @@ impl FromWorld for WorldControlChannel {
Self { tx, rx }
}
}

pub fn update_world_from_input(
input_receiver: Res<WorldControlChannel>,
mut camera: Query<&mut Transform, With<Camera>>,
) {
let speed = 0.1;
if let Ok(input) = input_receiver.rx.try_recv() {
log::info!("user input : {input}");
match input.as_str() {
"UP" => camera.iter_mut().for_each(|mut transform| transform.translation.y += speed),
"DOWN" => camera.iter_mut().for_each(|mut transform| transform.translation.y -= speed),
"LEFT" => camera.iter_mut().for_each(|mut transform| transform.translation.x -= speed),
"RIGHT" => camera.iter_mut().for_each(|mut transform| transform.translation.x += speed),
"ZOOM-IN" => {
camera.iter_mut().for_each(|mut transform| transform.translation.z -= speed)
},
"ZOOM-OUT" => {
camera.iter_mut().for_each(|mut transform| transform.translation.z += speed)
},
e => {
log::info!("Input received | {e}");
},
}
}
}
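
Note: only the FromWorld impl of WorldControlChannel is visible above; the struct itself sits in the collapsed part of this file. For orientation, here is a minimal sketch of how the resource is presumably declared, assuming an unbounded crossbeam channel carrying the raw command strings (the field names tx and rx come from the impl above; the choice of crossbeam_channel is an assumption, not confirmed by the diff):

use bevy::ecs::system::Resource;
use crossbeam_channel::{Receiver, Sender};

/// Bridges the WebSocket server and the Bevy world: receive_message pushes
/// raw command strings in through tx, update_world_from_input drains rx.
#[derive(Resource)]
pub struct WorldControlChannel {
    pub tx: Sender<String>,
    pub rx: Receiver<String>,
}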
9 changes: 6 additions & 3 deletions examples/new_media/src/main.rs
@@ -17,6 +17,7 @@ use bevy_panorbit_camera::{PanOrbitCamera, PanOrbitCameraPlugin};

use bevy_gaussian_splatting::{GaussianCloud, GaussianSplattingBundle, GaussianSplattingPlugin};
use bevy_ws_server::WsPlugin;
use controls::{update_world_from_input, WorldControlChannel};
use server::{receive_message, start_ws};

fn setup_gaussian_cloud(
@@ -29,7 +30,7 @@
) {
// let remote_file = Some("https://huggingface.co/datasets/cs50victor/splats/resolve/main/train/point_cloud/iteration_7000/point_cloud.gcloud");
// TODO: figure out how to load remote files later
let splat_file = "splats/bonsai/point_cloud/iteration_7000/point_cloud.gcloud";
let splat_file = "splats/counter/point_cloud/iteration_7000/point_cloud.gcloud";
log::info!("loading {}", splat_file);
let cloud = asset_server.load(splat_file.to_string());

@@ -83,7 +84,9 @@ fn main() {

Engine::new()
.insert_resource(bevy_headless::SceneInfo::new(config.width, config.height))
.insert_resource(ClearColor(Color::rgb_u8(255, 255, 255)))
.init_resource::<WorldControlChannel>()
// .insert_resource(ClearColor(Color::rgb_u8(255, 255, 255)))
.insert_resource(ClearColor(Color::rgb_u8(0, 0, 0)))
.add_plugins((
HeadlessPlugin,
WsPlugin,
@@ -92,6 +95,6 @@
GaussianSplattingPlugin,
))
.add_systems(Startup, (start_ws, setup_gaussian_cloud))
.add_systems(Update, receive_message)
.add_systems(Update, (receive_message,update_world_from_input))
.run();
}
15 changes: 13 additions & 2 deletions examples/new_media/src/server.rs
@@ -6,11 +6,14 @@ use bevy_headless::CurrImageContainer;
use bevy_ws_server::{ReceiveError, WsConnection, WsListener};
use log::info;
use serde_json::json;
use tungstenite::Message;

use std::fmt::Debug;

use serde::{Deserialize, Serialize};

use crate::controls::WorldControlChannel;

#[derive(Debug, Serialize, Deserialize)]
pub struct HttpServerMsg<T> {
data: Option<T>,
@@ -48,16 +51,24 @@ pub fn start_ws(listener: Res<WsListener>) {
pub fn receive_message(
mut commands: Commands,
curr_image: Res<CurrImageContainer>,
user_input: Res<WorldControlChannel>,
connections: Query<(Entity, &WsConnection)>,
) {
for (entity, conn) in connections.iter() {
loop {
match conn.receive() {
Ok(message) => {
info!("message | {message:?}");
let resp = tungstenite::protocol::Message::Text(
if let Message::Text(msg) = message {
if let Err(e) = user_input.tx.send(msg) {
log::error!("Couldn't send user input to world channel | {e}");
};
}
let curr_img = curr_image.0.lock();
let resp = Message::Text(
json!({
"image": curr_image.0.lock().to_web_base64().unwrap()
"image": curr_img.to_web_base64().unwrap(),
"dimension": curr_img.dimensions()
})
.to_string(),
);
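
With these changes, every text frame the server receives is forwarded into WorldControlChannel and answered with the current rendered frame as JSON ({ "image": <base64>, "dimension": [w, h] }). A quick way to exercise the path without the browser demo is a small native client; the sketch below is illustrative and assumes the demo's default port 8080 and a tungstenite version (0.20+) that exposes the send/read API:

use tungstenite::{connect, Message};

fn main() -> anyhow::Result<()> {
    // Connect to the WebSocket server started by start_ws.
    let (mut socket, _response) = connect("ws://localhost:8080")?;

    // Any command understood by update_world_from_input ("UP", "DOWN",
    // "LEFT", "RIGHT", "ZOOM-IN", "ZOOM-OUT").
    socket.send(Message::Text("UP".into()))?;

    // The server replies with the current frame's metadata as JSON text.
    if let Message::Text(reply) = socket.read()? {
        println!("received {} bytes of frame metadata", reply.len());
    }
    Ok(())
}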