mirror of https://github.com/bvanroll/unnamed_chatgpt_project.git
synced 2025-08-28 11:32:41 +00:00
maybe build on pc?
.gitignore (vendored): 1 line changed
@@ -2,6 +2,7 @@
# will have compiled files and executables
main/target/
data_prep/target/
finalise_from_context/target/

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html

finalise_from_context/Cargo.toml (new file): 12 lines
@@ -0,0 +1,12 @@
[package]
name = "unnamed_chatgpt_project"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.91"
rfd = "0.10.0"
rust-bert = "0.20.0"

finalise_from_context/finalise_from_context.iml (new file): 12 lines
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="RUST_MODULE" version="4">
  <component name="NewModuleRootManager" inherit-compiler-output="true">
    <exclude-output />
    <content url="file://$MODULE_DIR$">
      <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
      <excludeFolder url="file://$MODULE_DIR$/target" />
    </content>
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>

finalise_from_context/src/main.rs (new file): 98 lines
@@ -0,0 +1,98 @@
use std::fs::File;
use std::io;
use std::io::{BufReader, Read, Write};

use serde::{Serialize, Deserialize};

use rust_bert::bert::{BertConfigResources, BertModelResources, BertVocabResources};
use rust_bert::pipelines::common::ModelType;
use rust_bert::pipelines::question_answering::Answer;
use rust_bert::pipelines::question_answering::{
    QaInput, QuestionAnsweringConfig, QuestionAnsweringModel,
};
use rust_bert::resources::RemoteResource;



#[derive(Deserialize, Serialize)]
struct Human {
    firstName: String,
    lastName: String,
    gender: String,
    age: String,
    country: String,
    job: String,
    bio: String,
}

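// Flow: pick a JSON file of Humans via a file dialog, run BERT extractive QA over each
// bio to fill in gender, age, country and job, then write the updated list to a chosen file.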
fn main() {
    //load in the file with bios and names
    let current_path = std::env::current_dir().unwrap();
    let res = rfd::FileDialog::new().set_directory(&current_path).pick_file().unwrap();
    let mut file = File::open(res.as_path()).unwrap();
    let mut json_string: String = String::new();
    file.read_to_string(&mut json_string).unwrap();
    let mut Humans: Vec<Human> = serde_json::from_str(&json_string).unwrap();
    //prep final file
    let save_res = rfd::FileDialog::new().set_directory(&current_path).save_file().unwrap();
    let mut i = 0;
    let l = Humans.len();
    println!("there are {} humans to process", l - i);

    for mut human in &mut Humans {
        let (gender, age, country, job) = getHumanFromContext(human.bio.clone(), human.firstName.clone());
        human.gender = gender;
        human.age = age;
        human.country = country;
        human.job = job;
        println!("just did {} at index {}", human.firstName.clone(), i);
        println!("There are {} humans left to process", l - i);
        i = i + 1;
    }

    let serialized: String = serde_json::to_string(&Humans).unwrap();
    let mut file = File::create(save_res.as_path()).unwrap();
    file.write_all(serialized.as_bytes()).expect("oopsie");
}

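// Returns, in order: gender, age, country, job, each taken as the top BERT answer to a
// question about firstName asked against the bio text. The pretrained BERT QA resources
// are downloaded and cached by rust-bert on first use.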
fn getHumanFromContext(context: String, firstName: String) -> (String, String, String, String) {
    //TODO use the other ai to get answers from a given context
    let bertconfig = QuestionAnsweringConfig::new(
        ModelType::Bert,
        RemoteResource::from_pretrained(BertModelResources::BERT_QA),
        RemoteResource::from_pretrained(BertConfigResources::BERT_QA),
        RemoteResource::from_pretrained(BertVocabResources::BERT_QA),
        None, //merges resource only relevant with ModelType::Roberta
        false,
        false,
        None,
    );
    let mut model = QuestionAnsweringModel::new(bertconfig).unwrap();
    let mut genderQuestion = QaInput {
        question: format!("What is {}'s gender?", firstName),
        context: context.clone()
    };
    let mut ageQuestion = QaInput {
        question: format!("What is {}'s age?", firstName),
        context: context.clone()
    };
    let mut countryQuestion = QaInput {
        question: format!("Where does {} live?", firstName),
        context: context.clone()
    };
    let mut jobQuestion = QaInput {
        question: format!("What is {}'s job?", firstName),
        context: context.clone()
    };

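    // predict(inputs, top_k, batch_size): ask all four questions in one call, keeping only the best answer per question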
    let mut answers = model.predict(&[genderQuestion, ageQuestion, countryQuestion, jobQuestion], 1, 32);
    let mut looper = answers.iter();
    let mut gender = looper.next().unwrap().first().unwrap().answer.clone();
    let mut age = looper.next().unwrap().first().unwrap().answer.clone();
    let mut country = looper.next().unwrap().first().unwrap().answer.clone();
    let mut job = looper.next().unwrap().first().unwrap().answer.clone();


    return (gender, age, country, job)
}

@@ -5,7 +5,9 @@ use std::io::{BufReader, Read, Write};
use async_openai::{Client, types::{CreateCompletionRequestArgs}};
use serde::{Serialize, Deserialize};
use rand::Rng;
use rust_bert::roberta::RobertaForQuestionAnswering;


#[derive(Deserialize)]
struct MiniHuman {
@@ -25,6 +27,8 @@ struct Human {

#[tokio::main]
async fn main() {

    let current_path = std::env::current_dir().unwrap();
    let res = rfd::FileDialog::new().set_directory(&current_path).pick_file().unwrap();
    let mut file = File::open(res.as_path()).unwrap();
@@ -35,6 +39,7 @@ async fn main() {
    let save_res = rfd::FileDialog::new().set_directory(&current_path).save_file().unwrap();
    let mut client = Client::new();
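    // draw two random MiniHuman entries to use as the first and last name of a generated Human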
    while MiniHumans.len() > 1 {
        println!("still got {} to go", MiniHumans.len());
        let (mut firstName, mut firstGender) = getRngName(&mut MiniHumans);
        let (mut lastName, mut lastGender) = getRngName(&mut MiniHumans);
        if firstName == "" || lastName == "" || (firstGender == "" && lastGender == "") { continue }
@@ -45,7 +50,6 @@
            Ok(h) => Humans.push(h),
            Err(e) => println!("some err occurred: {:?}", e.to_string()),
        };
        break;
    }
    let serialized: String = serde_json::to_string(&Humans).unwrap();
    let mut file = File::create(save_res.as_path()).unwrap();
@@ -73,8 +77,12 @@ async fn getHuman(client: &mut Client, firstName: String, lastName: String, gend
    let res = client.completions().create(request).await;
    let response = String::from(format!("{}", res?.choices.first().unwrap().text));

    let (finalGender, age, country, job) = getHumanFromContext(response.clone());

    //let (finalGender, age, country, job) = getHumanFromContext(response.clone(), firstName.clone());
    //NOTE rust-bert won't run inside this async code, so these fields are left empty here and filled in afterwards by the separate finalise_from_context project instead of mixing blocking model calls into the async workflow
    let finalGender = "".to_string();
    let age = "".to_string();
    let country = "".to_string();
    let job = "".to_string();
    return Ok(Human{
        firstName: firstName,
        lastName: lastName,
@@ -86,13 +94,23 @@ async fn getHuman(client: &mut Client, firstName: String, lastName: String, gend
    });
}
//returns in order: gender, age, country, job
fn getHumanFromContext(context: String, firstName: String) -> (String, String, String, String) {
    //TODO use the other ai to get answers from a given context
    let qa_model = QuestionAnsweringModel::new(Default::default())?;
    let gender = String::from(format!("What is {}'s gender?", firstName));
    let age = String::from(format!("What is {}'s age?", firstName));
    let country = String::from(format!("Where does {} live?", firstName));
    let job = String::from(format!("What is {}'s job?", firstName));
    let answers = qa_model.predict(&[QaInput { question, context }], 1, 32);
    return ("".to_string(), "".to_string(), "".to_string(), "".to_string())
}
// fn getHumanFromContext(context: String, firstName: String) -> (String, String, String, String) {
//     //
//     let bertconfig = QuestionAnsweringConfig::new(
//         ModelType::Bert,
//         RemoteResource::from_pretrained(BertModelResources::BERT_QA),
//         RemoteResource::from_pretrained(BertConfigResources::BERT_QA),
//         RemoteResource::from_pretrained(BertVocabResources::BERT_QA),
//         None, //merges resource only relevant with ModelType::Roberta
//         false,
//         false,
//         None,
//     );
//     let mut model = QuestionAnsweringModel::new(bertconfig).unwrap();
//     let gender = String::from(format!("What is {}'s gender?", firstName));
//     let age = String::from(format!("What is {}'s age?", firstName));
//     let country = String::from(format!("Where does {} live?", firstName));
//     let job = String::from(format!("What is {}'s job?", firstName));
//     let answers = model.predict(&[QaInput { question: gender, context: context }], 1, 32);
//     return ("".to_string(), "".to_string(), "".to_string(), "".to_string())
// }