Skip to content

Commit

Permalink
Merge pull request #9 from pilksoc/backend
Browse files Browse the repository at this point in the history
Add LLM trait for future LLM interactions
  • Loading branch information
ettolrach authored Mar 2, 2024
2 parents 2032457 + d1a37a9 commit a49cfeb
Show file tree
Hide file tree
Showing 16 changed files with 253 additions and 5 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -56,3 +56,6 @@ Cargo.lock

backend/target/

.env

.env
1 change: 1 addition & 0 deletions backend/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
.env
8 changes: 6 additions & 2 deletions backend/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,14 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
openai_api_rust = "0.1.8"
diesel = { version = "2.1.0", features = ["postgres"] }
dotenvy = "0.15"
# uuid 1.7 with "v5": the code derives ids via Uuid::new_v5.
uuid = { version = "1.7.0", features = ["v5", "fast-rng", "macro-diagnostics"] }
tokio = { version = "1", features = ["full"] }
tokio-stream = "0.1.6"
async-trait = "0.1.77"
warp = "0.3"
serde = { version = "1.0", features = ["derive"]}
serde_json = "1.0"
futures = { version = "0.3", default-features=false}
6 changes: 6 additions & 0 deletions backend/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Setup
1. Install Rust
2. Run `cargo build`
3. Create the environment file: `echo DATABASE_URL=postgres://username:password@localhost/diesel_demo > .env`
4. Run `diesel setup`
5. Run `cargo run`
22 changes: 22 additions & 0 deletions backend/src/cache.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
use crate::kube::{KubeId, Kube};
use std::vec::Vec;

/// A sorted list of [`KubeId`]s identifying the inputs of one combination.
///
/// The ids are sorted on construction so two recipes built from the same
/// multiset of inputs hash identically regardless of insertion order.
#[derive(Debug, PartialEq, Eq)]
pub struct Recipe {
    items: Vec<KubeId>,
}

impl Recipe {
    /// Create a recipe from an arbitrarily ordered list of input ids.
    pub fn new(mut items: Vec<KubeId>) -> Self {
        items.sort();
        Recipe { items }
    }

    /// Fold the recipe into a 64-bit cache key.
    ///
    /// All 128-bit ids are XORed together, then the high half is XORed into
    /// the low half. The previous `big_key >> 64 & big_key` computed
    /// `high & low` (`>>` binds tighter than `&`), an AND that biases the
    /// key toward zero bits; XOR preserves the entropy of both halves.
    pub fn hash(&self) -> u64 {
        let big_key = self.items.iter().fold(0u128, |acc, x| acc ^ x.as_u128());
        ((big_key >> 64) ^ big_key) as u64
    }
}

/// Placeholder for a PostgreSQL-backed recipe cache.
///
/// NOTE(review): not implemented yet — presumably this will hold a Diesel
/// connection and use [`Recipe::hash`] as the lookup key for the tables in
/// `schema.rs`; confirm once the cache is wired up.
pub struct PsqlCache {

}
20 changes: 20 additions & 0 deletions backend/src/cache_test.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
#[cfg(test)]
mod tests {
    // This test lives in its own file, so `use super::*` would resolve to the
    // crate root rather than the `cache` module — import the types by path.
    // NOTE(review): `cache_test` is not declared in lib.rs, so this module is
    // currently never compiled; add `mod cache_test;` (or move the tests into
    // cache.rs) for it to run.
    use crate::cache::Recipe;
    use crate::kube::KubeId;

    /// The cache key must depend only on the multiset of input ids, not on
    /// the order in which they were supplied (duplicates included).
    #[test]
    fn test_kube_cache_hash_is_not_order_sensitive() {
        // Two vecs instead of clone+reverse: KubeId does not derive Clone.
        let forward = vec![
            KubeId::new("a"),
            KubeId::new("b"),
            KubeId::new("b"),
            KubeId::new("c"),
        ];
        let backward = vec![
            KubeId::new("c"),
            KubeId::new("b"),
            KubeId::new("b"),
            KubeId::new("a"),
        ];
        assert_eq!(Recipe::new(forward).hash(), Recipe::new(backward).hash());
    }
}
35 changes: 32 additions & 3 deletions backend/src/kube.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,36 @@
#[derive(Debug, PartialEq)]
use uuid::Uuid;

#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct KubeId {
uuid: Uuid,
}

impl KubeId {
pub fn new(name: &str) -> Self {
let mut name = name.to_string();
name.push_str("kube");
KubeId {
uuid: Uuid::new_v5(&Uuid::NAMESPACE_DNS, name.as_bytes()),
}
}

pub fn as_u128(&self) -> u128 {
self.uuid.as_u128()
}
}

#[derive(PartialEq, Debug)]
pub struct Kube {
name: String,
uuid: String, //uuid type?
pub id: KubeId,
pub name: String,
}
impl Kube {
pub fn new(name: String) -> Kube {
Kube {
id: KubeId::new(name.as_str()),
name,
}
}
}

// TODO: we should have a placeholder "loading" kube we can send over if the API is slow.
2 changes: 2 additions & 0 deletions backend/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,7 @@ pub mod grid;
pub mod space;
pub mod kube;
pub mod player;
pub mod llm;
pub mod cache;

type Coordinate = [u64; 2];
52 changes: 52 additions & 0 deletions backend/src/llm.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
use crate::kube::Kube;
use async_trait::async_trait;

/// Trait for interacting with an LLM.
///
/// NOTE(review): `query` takes no receiver while `combine` takes `&self`, so
/// `query` cannot be dispatched through a `dyn LLM` trait object and cannot
/// use per-instance state (API keys, clients) — confirm this asymmetry is
/// intended before real LLM backends implement it.
#[async_trait]
pub trait LLM {
    /// Send a query to the LLM and get a [`std::string::String`] response.
    async fn query(input: &str) -> String;
    /// Ask the LLM to combine the given Kubes and return a new Kube.
    async fn combine(&self, kubes: &[Kube]) -> Kube;
}

/// A fake LLM that functions very basically, not processing the input in any
/// meaningful way. This is most useful for testing functionality of other
/// features which use LLMs.
#[derive(Debug, Default)]
pub struct FakeLLM {
}

impl FakeLLM {
    /// Construct a fake LLM. Made `pub` so tests in other modules can build
    /// one — the struct was public but its only constructor was private,
    /// which defeated the stated purpose of the type. (`Default` is also
    /// derived, satisfying clippy's `new_without_default`.)
    pub fn new() -> FakeLLM {
        FakeLLM { }
    }
}

#[async_trait]
impl LLM for FakeLLM {
    /// Echo the input back inside a canned reply.
    async fn query(input: &str) -> String {
        format!("This is a response to: {input}")
    }

    /// "Combine" the kubes by concatenating their names, in order, and
    /// minting a new kube from the resulting string.
    async fn combine(&self, kubes: &[Kube]) -> Kube {
        let combined: String = kubes.iter().map(|kube| kube.name.as_str()).collect();
        Kube::new(combined)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    /// `FakeLLM::combine` should concatenate the input kubes' names in order.
    #[tokio::test]
    async fn fake_combine_test() {
        let inputs = vec![
            Kube::new(String::from("water")),
            Kube::new(String::from("glass")),
        ];
        let fake = FakeLLM::new();
        let combined = fake.combine(&inputs).await;
        assert_eq!(combined.name, String::from("waterglass"));
    }
}
33 changes: 33 additions & 0 deletions backend/src/schema.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
// @generated automatically by Diesel CLI.
//
// NOTE(review): do not edit by hand — rerun `diesel print_schema` (configured
// in diesel.toml) after changing the migrations.

// Input lines of each recipe.
// NOTE(review): recipe_id is the sole primary key, mirroring the migration —
// this allows only one input line per recipe; see the note in the migration.
diesel::table! {
    kube_recipe_lines (recipe_id) {
        recipe_id -> Uuid,
        input_id -> Uuid,
    }
}

// One row per known combination, pointing at the kube it produces.
diesel::table! {
    kube_recipes (id) {
        id -> Uuid,
        output_id -> Uuid,
    }
}

// Catalogue of all known kubes.
diesel::table! {
    kubes (id) {
        id -> Uuid,
        #[max_length = 255]
        name -> Varchar,
    }
}

diesel::joinable!(kube_recipe_lines -> kube_recipes (recipe_id));
diesel::joinable!(kube_recipe_lines -> kubes (input_id));
diesel::joinable!(kube_recipes -> kubes (output_id));

diesel::allow_tables_to_appear_in_same_query!(
    kube_recipe_lines,
    kube_recipes,
    kubes,
);
9 changes: 9 additions & 0 deletions diesel.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli

[print_schema]
file = "backend/src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId"]

[migrations_directory]
dir = "migrations"
Empty file added migrations/.keep
Empty file.
6 changes: 6 additions & 0 deletions migrations/00000000000000_diesel_initial_setup/down.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

-- Remove the updated_at trigger helpers installed by the matching up.sql.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();
36 changes: 36 additions & 0 deletions migrations/00000000000000_diesel_initial_setup/up.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.




-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
-- Installs a BEFORE UPDATE trigger on the given table that invokes
-- diesel_set_updated_at() for each modified row.
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
    EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
                    FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;

-- Sets NEW.updated_at to the current timestamp whenever the row actually
-- changed and the caller did not set updated_at themselves.
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
    IF (
        NEW IS DISTINCT FROM OLD AND
        NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
    ) THEN
        NEW.updated_at := current_timestamp;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
5 changes: 5 additions & 0 deletions migrations/2024-03-02-174331_create_init_db/down.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
-- This file should undo anything in `up.sql`

-- Table names fixed to match up.sql: it creates `kube_recipe_lines`,
-- `kube_recipes`, and `kubes` — the previous `kubes_recipe*` names do not
-- exist, so this down migration would fail.
-- Dropped in reverse dependency order: recipe lines reference recipes and
-- kubes; recipes reference kubes.
DROP TABLE kube_recipe_lines;
DROP TABLE kube_recipes;
DROP TABLE kubes;
20 changes: 20 additions & 0 deletions migrations/2024-03-02-174331_create_init_db/up.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
-- Your SQL goes here

-- Catalogue of all known kubes, keyed by a deterministic UUID
-- (see KubeId::new in backend/src/kube.rs).
CREATE TABLE kubes (
    id uuid primary key,
    name varchar(255) not null
);

-- Speeds up lookups of kubes by name.
CREATE INDEX k_i on kubes(name);

-- One row per known combination; output_id is the kube the recipe produces.
CREATE TABLE kube_recipes (
    id uuid primary key,
    output_id uuid not null references kubes(id)
);

-- The inputs of each recipe.
-- NOTE(review): recipe_id is the PRIMARY KEY, so each recipe can store at
-- most one input line — but Recipe in cache.rs takes several (possibly
-- repeated) inputs. A composite key plus a quantity column is probably
-- needed; changing it also requires regenerating schema.rs. Confirm.
CREATE TABLE kube_recipe_lines (
    recipe_id uuid primary key references kube_recipes(id),
    input_id uuid not null references kubes(id)
);

-- Covering index for lookups by (recipe, input).
CREATE INDEX krl_i ON kube_recipe_lines(recipe_id, input_id);

0 comments on commit a49cfeb

Please sign in to comment.