uwu
parent 81332f441d
commit 4a29844ad4
7 changed files with 546 additions and 152 deletions
26 Cargo.lock (generated)
@@ -47,6 +47,17 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

[[package]]
name = "chapolibot"
version = "0.1.0"
dependencies = [
 "frankenstein",
 "lazy_static",
 "rand",
 "regex",
 "unicode-segmentation",
]

[[package]]
name = "chunked_transfer"
version = "1.4.0"
@@ -417,15 +428,6 @@ dependencies = [
 "unicode-xid",
]

[[package]]
name = "telegram_bots"
version = "0.1.0"
dependencies = [
 "frankenstein",
 "rand",
 "regex",
]

[[package]]
name = "tempfile"
version = "3.3.0"
@@ -510,6 +512,12 @@ dependencies = [
 "tinyvec",
]

[[package]]
name = "unicode-segmentation"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"

[[package]]
name = "unicode-xid"
version = "0.2.3"
Cargo.toml
@@ -1,5 +1,5 @@
[package]
name = "telegram_bots"
name = "chapolibot"
version = "0.1.0"
edition = "2021"
@@ -9,3 +9,5 @@ edition = "2021"
frankenstein = "0.13"
rand = { version = "0.8.5", features = ["small_rng"] }
regex = "1"
unicode-segmentation = "1.7.1"
lazy_static = "1.4.0"
4 README.md (new file)
@@ -0,0 +1,4 @@
The source code of @chapolibot on Telegram

This code is distributed under the GPLv3, but feel free to ping me to tell me what you do with it.
src/commands.rs
@@ -5,20 +5,23 @@ use frankenstein::{
use super::utils;
use super::uwuify;
use rand::prelude::*;
use std::collections::HashMap;

#[derive(Hash, Eq, PartialEq, Clone)]
#[derive(Eq, PartialEq, Clone)]
pub enum Command {
    Dice,
    Cookie,
    Uwu,
    Brat
    Brat,
    Keymash
}

pub static COMMAND_LIST: [Command; 4] = [
pub static COMMAND_LIST: [Command; 5] = [
    Command::Dice,
    Command::Cookie,
    Command::Uwu,
    Command::Brat
    Command::Brat,
    Command::Keymash
];

pub fn get_command_attrs<'a>(cmd: &Command) -> utils::CommandAttrs<'a> {
@@ -51,6 +54,13 @@ pub fn get_command_attrs<'a>(cmd: &Command) -> utils::CommandAttrs<'a> {
                handler: brat as fn(&utils::Context, Message)
            }
        }
        Command::Keymash => {
            utils::CommandAttrs {
                name: "keymash",
                description: "keymash",
                handler: keymash as fn(&utils::Context, Message)
            }
        }
    }
}

@@ -89,4 +99,61 @@ fn brat(ctx: &utils::Context, message: Message) {
        if let Some(reply) = message.reply_to_message {
            utils::send_message(ctx, message.chat.id, brat_strings[rng.gen_range(0..brat_strings.len())], Some(reply.message_id));
        }
    }
}

fn get_key() -> String {
    let mut map = HashMap::new();
    map.insert("a", 1.0);
    map.insert("z", 5.0);
    map.insert("q", 5.0);
    map.insert("s", 4.0);
    map.insert("w", 3.0);
    map.insert("x", 2.0);
    map.insert("e", 5.0);
    map.insert("d", 3.0);
    map.insert("c", 5.0);
    map.insert("r", 4.0);
    map.insert("f", 6.0);
    map.insert("v", 2.0);
    map.insert("y", 1.0);
    map.insert("g", 5.0);
    map.insert("b", 1.0);
    map.insert("h", 3.0);
    map.insert("n", 4.0);
    map.insert("u", 5.0);
    map.insert("j", 4.0);
    map.insert("i", 6.0);
    map.insert("k", 5.0);
    map.insert("o", 5.0);
    map.insert("l", 6.0);
    map.insert("p", 3.0);
    map.insert("m", 3.0);
    let total: f64 = map.values().sum();
    let mut rng = thread_rng();
    let a: f64 = rng.gen();
    let mut cumul: f64 = 0.0;
    for (letter, weight) in map {
        cumul += weight/total;
        if cumul > a {
            return letter.to_string()
        }
    }
    "f".to_string()
}

fn keymash(ctx: &utils::Context, message: Message) {
    let mut length = 12;

    for e in message.entities.unwrap() {
        if e.type_field == MessageEntityType::BotCommand {
            length = (message.text.as_ref().unwrap())[usize::from(e.offset+e.length)..].trim().parse().unwrap_or(12);
            break
        }
    }

    let mut mashed = "".to_string();
    for _ in 0..length {
        mashed += &get_key();
    }
    utils::send_message(ctx, message.chat.id, &format!("{}", mashed), Some(message.message_id));
}
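Side note on get_key above: it draws one letter from a hand-weighted table by accumulating normalized weights until the running total passes a uniform draw, which is why home-row keys dominate the mash. A minimal standalone sketch of that cumulative-weight sampling (not part of the commit; the helper name and the short weight table are illustrative):

    use rand::prelude::*;

    // Pick one item from (value, weight) pairs by walking the cumulative
    // distribution until it passes a uniform draw in [0, 1).
    fn weighted_pick<'a>(choices: &'a [(&'a str, f64)]) -> &'a str {
        let total: f64 = choices.iter().map(|&(_, w)| w).sum();
        let draw: f64 = thread_rng().gen();
        let mut cumul = 0.0;
        for &(value, weight) in choices {
            cumul += weight / total;
            if cumul > draw {
                return value;
            }
        }
        // Floating-point rounding can leave the loop without a hit.
        choices.last().map(|&(v, _)| v).unwrap_or("")
    }

    fn main() {
        // Illustrative weights only; the bot's table covers the whole keyboard.
        let letters = [("a", 1.0), ("s", 4.0), ("d", 3.0), ("f", 6.0)];
        let mashed: String = (0..12).map(|_| weighted_pick(&letters)).collect();
        println!("{}", mashed);
    }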
36 src/main.rs
@@ -1,7 +1,14 @@
#[macro_use]
extern crate lazy_static;
use frankenstein::{
    Api,
    TelegramApi,
    GetUpdatesParams,
    AnswerInlineQueryParams,
    InlineQueryResult,
    InlineQueryResultArticle,
    InputMessageContent,
    InputTextMessageContent
};

mod commands;
@@ -32,7 +39,8 @@ fn main() {

    utils::init_command_list(&ctx);

    let update_params_builder = GetUpdatesParams::builder().allowed_updates(vec!["message".to_string()]);
    let update_params_builder = GetUpdatesParams::builder()
        .allowed_updates(vec!["message".to_string(), "inline_query".to_string()]);
    let mut update_params = update_params_builder.clone().build();

    loop {
@@ -44,6 +52,30 @@ fn main() {
            for update in response.result {
                if let Some(message) = update.message {
                    utils::exec_command(&ctx, message);
                } else if let Some(inline_query) = update.inline_query {
                    println!("{:?}", inline_query);
                    let params = AnswerInlineQueryParams::builder()
                        .inline_query_id(inline_query.id)
                        .results(vec![InlineQueryResult::Article(InlineQueryResultArticle {
                            description: None,
                            id: "0".to_string(),
                            title: "Uwuify".to_string(),
                            hide_url: None,
                            url: None,
                            input_message_content: InputMessageContent::Text(InputTextMessageContent {
                                message_text: uwuify::uwuify(inline_query.query),
                                parse_mode: None,
                                entities: None,
                                disable_web_page_preview: None,
                            }),
                            reply_markup: None,
                            thumb_height: None,
                            thumb_width: None,
                            thumb_url: None,
                        })]).build();
                    if let Err(err) = ctx.api.answer_inline_query(&params) {
                        println!("Failed to send inline message: {:?}", err);
                    };
                }
                update_params = update_params_builder.clone().offset(update.update_id + 1).build();
            }
@@ -59,4 +91,4 @@ fn main() {
            eprintln!("Failed to get me: {:?}", error);
            }
        }
    }
}
59 src/utils.rs
@@ -22,23 +22,50 @@ pub struct CommandAttrs<'a> {
}

pub fn send_message(ctx: &Context, chat_id: i64, text: &str, message_id_to_repy: Option<i32>) {
    let send_message_params: SendMessageParams;

    if let Some(id) = message_id_to_repy {
        send_message_params = SendMessageParams::builder()
            .chat_id(chat_id)
            .text(text)
            .reply_to_message_id(id)
            .build();
    } else {
        send_message_params = SendMessageParams::builder()
            .chat_id(chat_id)
            .text(text)
            .build();
    }
    let mut text_still_to_send = text;
    loop {
        if text_still_to_send.len() == 0 {
            break
        }

    if let Err(err) = ctx.api.send_message(&send_message_params) {
        println!("Failed to send message: {:?}", err);
        let mut text_to_send = text_still_to_send;
        if text_to_send.len() > 4096 {
            let mut i = 4096;
            loop {
                if i == 0 {
                    break;
                }
                if text_still_to_send.is_char_boundary(i) {
                    println!("{}", i);
                    (text_to_send, text_still_to_send) = text_still_to_send.split_at(i);
                    break
                }
                i -= 1;
            }

        } else {
            text_to_send = text_still_to_send;
            text_still_to_send = &"";
        }

        let send_message_params: SendMessageParams;

        if let Some(id) = message_id_to_repy {
            send_message_params = SendMessageParams::builder()
                .chat_id(chat_id)
                .text(text_to_send)
                .reply_to_message_id(id)
                .build();
        } else {
            send_message_params = SendMessageParams::builder()
                .chat_id(chat_id)
                .text(text_to_send)
                .build();
        }

        if let Err(err) = ctx.api.send_message(&send_message_params) {
            println!("Failed to send message: {:?}", err);
        }
    }
}
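The new send_message above now loops, peeling off at most 4096 bytes at a time and stepping the cut point back until it lands on a UTF-8 character boundary, so multi-byte characters are never split across messages. A standalone sketch of just that chunking step (not part of the commit; chunk_text is an illustrative helper):

    // Split text into chunks of at most max_len bytes without cutting through
    // a multi-byte UTF-8 character (same boundary search as send_message above).
    fn chunk_text(mut text: &str, max_len: usize) -> Vec<&str> {
        assert!(max_len >= 4, "a UTF-8 character can be up to 4 bytes long");
        let mut chunks = Vec::new();
        while !text.is_empty() {
            if text.len() <= max_len {
                chunks.push(text);
                break;
            }
            // Back off from max_len until we land on a char boundary.
            let mut cut = max_len;
            while !text.is_char_boundary(cut) {
                cut -= 1;
            }
            let (head, tail) = text.split_at(cut);
            chunks.push(head);
            text = tail;
        }
        chunks
    }

    fn main() {
        let long = "héllo ".repeat(1000);
        for part in chunk_text(&long, 4096) {
            assert!(part.len() <= 4096);
        }
    }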
492 src/uwuify.rs
@@ -1,147 +1,401 @@
use regex::{ Captures, Regex };
use rand::prelude::*;
use unicode_segmentation::UnicodeSegmentation;

static UWU_PROBA: f64 = 0.5;
static UWU_PROBA: f64 = 0.3;
static STUTTER_PROBA: f64 = 0.2;
static NYA_PROBA: f64 = 1.0;
static R_TO_W_PROBA: f64 = 0.5;
static UH_PROBA: f64 = 0.1;

fn replace_open_ponct(txt: String) -> String {
    let open_ponct = "。・:*:・゚★₊。・:*:・゚☆ ";
    let re = Regex::new(r"[(<{«]").unwrap();
    re.replace_all(&txt, open_ponct).to_string()
lazy_static! {
    static ref RE_UWU: regex::Regex = Regex::new(r"(?i)(ou|owo|uwu|o+|u+)").unwrap();
    static ref RE_R_W: regex::Regex = Regex::new(r"(?i)(r)").unwrap();
    static ref RE_STUTTER: regex::Regex = Regex::new(r"(?i)\b([bcdfgjkptv])").unwrap();
    static ref RE_NYA: regex::Regex = Regex::new(r"(?i)(n)([aiou])").unwrap();
    static ref RE_OUI: regex::Regex = Regex::new(r"(?i)\b(oui|ui|yup|yep)").unwrap();
    static ref RE_NON: regex::Regex = Regex::new(r"(?i)(n)(on|ope|an)").unwrap();
    static ref RE_OPEN: regex::Regex = Regex::new(r"[(<{«]").unwrap();
    static ref RE_CLOSE: regex::Regex = Regex::new(r"[)>}»]").unwrap();
    static ref RE_FACE: regex::Regex = Regex::new(r"[.,?!]").unwrap();
    static ref RE_UH: regex::Regex = Regex::new(r" ").unwrap();
    static ref RE_OI_TO_WA: regex::Regex = Regex::new(r"(?i)^(qu|m|t)(oi)$").unwrap();
}

fn replace_close_ponct(txt: String) -> String {
    let close_ponct = " ☆゚・:*:・。₊★゚・:*:・。";
    let re = Regex::new(r"[)>}»]").unwrap();
    re.replace_all(&txt, close_ponct).to_string()
#[derive(Debug)]
struct Message {
    tokens: Vec<Token>
}

fn replace_face_ponct(txt: String) -> String {
    let mut rng = thread_rng();
    let faces = vec![
        "(・`ω´・)",
        // ";;w;;",
        "owo",
        "UwU",
        ">w<",
        "^w^",
        "(* ^ ω ^)",
        "(⌒ω⌒)",
        "ヽ(*・ω・)ノ",
        "(○´∀`○)",
        "(○・ω・○)",
        "\(^▽^)/",
        "nya~",
        ":3"
    ];
    let re = Regex::new(r"[.,?!]").unwrap();
    re.replace_all(&txt, |_: &Captures| {
        String::from(" ") + faces[rng.gen_range(0..faces.len())]
    }).to_string()
#[derive(Debug)]
enum Token {
    WordToken(WordToken),
    PoncToken(PoncToken)
}

fn uwu_injector(txt: String) -> String {
    let mut rng = thread_rng();
    let re = Regex::new(r"(?i)(ou|owo|uwu|o|u|r)").unwrap();
    re.replace_all(&txt, |cap: &Captures| {
        let c = cap.get(1).unwrap().as_str();
        if rng.gen_bool(UWU_PROBA) {
            match c.to_lowercase().as_str() {
                "o" => { String::from(c) + "wo" },
                "u" => { String::from(c) + "wu" },
                "r" => { String::from("w") },
                "owo" => { String::from(c) },
                "uwu" => { String::from(c) },
                "ou" => { match c {
                    "ou" => String::from("uwu"),
                    "Ou" => String::from("Uwu"),
                    "OU" => String::from("UWU"),
                    _ => panic!()
                }},
                a => { String::from(a) }
#[derive(Debug)]
struct WordToken {
    text: String,
    can_be_uwued: bool,
    can_be_stuttered: bool,
    can_be_r_to_w: bool,
    can_be_nyaied: bool,
    can_be_oui: bool,
    can_be_non: bool,
    can_be_oi_to_wa: bool
}

#[derive(Debug)]
struct PoncToken {
    text: String,
    can_be_open: bool,
    can_be_close: bool,
    can_be_face: bool,
    can_be_uh: bool
}

impl Message {
    fn parse(txt: &str) -> Message {
        let raw_words = txt.unicode_words().collect::<Vec<&str>>();
        let substrings = txt.split_word_bounds().collect::<Vec<&str>>();
        let mut tokens: Vec<Token> = Vec::new();
        for str in substrings {
            if let Some(_) = raw_words.iter().find(|&&x| x.eq(str)) {
                tokens.push(Token::WordToken(WordToken {
                    text: str.to_string(),
                    can_be_uwued: true,
                    can_be_stuttered: true,
                    can_be_r_to_w: true,
                    can_be_nyaied: true,
                    can_be_oui: true,
                    can_be_non: true,
                    can_be_oi_to_wa: true
                }));
            } else {
                tokens.push(Token::PoncToken(PoncToken {
                    text: str.to_string(),
                    can_be_open: true,
                    can_be_close: true,
                    can_be_face: true,
                    can_be_uh: true
                }));
            }
        } else {
            c.to_string()
        }
    }).to_string()
}

fn stutter(txt: String) -> String {
    let mut rng = thread_rng();
    let re = Regex::new(r"(?i)\b([bcdfgjkptv])").unwrap();
    re.replace_all(&txt, |cap: &Captures| {
        let c = cap.get(1).unwrap().as_str();
        if rng.gen_bool(STUTTER_PROBA) {
            c.to_string() + "-" + c
        } else {
            c.to_string()
        Message {
            tokens: tokens
        }
    }).to_string()
}
    }

fn nyaifier(txt: String) -> String {
    let mut rng = thread_rng();
    let re = Regex::new(r"(?i)(n)([aiou])").unwrap();
    re.replace_all(&txt, |cap: &Captures| {
        let n = cap.get(1).unwrap().as_str();
        let c = cap.get(2).unwrap().as_str();
        if rng.gen_bool(NYA_PROBA) {
            n.to_string() + match c {
                "A" | "I" | "O" | "U" => "Y",
                _ => "y"
            } + c
        } else {
            n.to_string() + c
        }
    }).to_string()
}

fn oui_replacer(txt: String) -> String {
    let re = Regex::new(r"(?i)(o)(ui)|(u)(i)|(y)(up|ep)").unwrap();
    re.replace_all(&txt, |cap: &Captures| {
        let c1 = cap.get(1).unwrap().as_str();
        let c2 = cap.get(2).unwrap().as_str();
        match c2 {
            "UI" | "I" | "UP" | "EP" => "WI",
            _ => {
                match c1 {
                    "O" | "U" | "Y" => "Wi",
                    _ => "wi"
    fn uwuify(&mut self) -> &mut Self {
        let mut i = 0;
        loop {
            if i % 100 == 0 { println!("{}", self.tokens.len()); }
            if i >= self.tokens.len() { break; }
            match &mut self.tokens[i] {
                Token::PoncToken(t) => {
                    t.replace_open_ponct()
                        .replace_close_ponct()
                        .replace_face_ponct()
                        .uh_injector();
                },
                Token::WordToken(t) => {
                    t.oui_replacer()
                        .non_replacer()
                        .nyaifier()
                        .stutter()
                        .oi_to_wa()
                        .uwu_injector()
                        .r_to_w();
                }
            }
            i += 1;
        }
    }).to_string()
        self
    }

    fn to_string(&self) -> String {
        let mut ret = "".to_string();
        for token in &self.tokens {
            ret += match token {
                Token::WordToken(t) => &t.text,
                Token::PoncToken(t) => &t.text
            };
        }
        ret
    }
}

fn non_replacer(txt: String) -> String {
    let re = Regex::new(r"(?i)(n)(on|ope|an)").unwrap();
    re.replace_all(&txt, |cap: &Captures| {
        let c1 = cap.get(1).unwrap().as_str();
        let c2 = cap.get(2).unwrap().as_str();
        match c2 {
            "ON" | "OPE" | "AN" => "NYON",
            _ => {
                match c1 {
                    "N" => "Nyon",
                    _ => "nyon"
impl WordToken {
    fn uwu_injector(&mut self) -> &mut Self {
        if self.can_be_uwued && self.text.len() > 4 {
            let mut rng = thread_rng();
            let old_text = self.text.clone();
            self.text = RE_UWU.replace_all(&self.text, |cap: &Captures| {
                let c = cap.get(1).unwrap().as_str();
                if rng.gen_bool(UWU_PROBA) {
                    match c.to_lowercase().as_str() {
                        "o" => { String::from(c) + "wo" },
                        "u" => { String::from(c) + "wu" },
                        "owo" => { String::from(c) },
                        "uwu" => { String::from(c) },
                        "ou" => { match c {
                            "ou" => String::from("uwu"),
                            "Ou" => String::from("Uwu"),
                            "OU" => String::from("UWU"),
                            _ => panic!()
                        }},
                        a => {
                            if a.to_lowercase().eq(&"o".repeat(a.len())) || a.to_lowercase().eq(&"u".repeat(a.len())) {
                                match &a.chars().next().unwrap() {
                                    'o' => String::from("owo"),
                                    'O' => String::from("Owo"),
                                    'u' => String::from("uwu"),
                                    'U' => String::from("Uwu"),
                                    _ => { panic!(); }
                                }
                            } else {
                                String::from(a)
                            }
                        }
                    }
                } else {
                    c.to_string()
                }
            }).to_string();
            if self.text.ne(&old_text) {
                self.can_be_r_to_w = false;
                self.can_be_oi_to_wa = false;
            }
            self.can_be_uwued = false;
        }
    }).to_string()
        self
    }

    fn r_to_w(&mut self) -> &mut Self {
        if self.can_be_r_to_w {
            let mut rng = thread_rng();
            let old_text = self.text.clone();
            self.text = RE_R_W.replace_all(&self.text, |cap: &Captures| {
                let c = cap.get(1).unwrap().as_str();
                if rng.gen_bool(R_TO_W_PROBA) {
                    match c {
                        "r" => { String::from("w") },
                        "R" => { String::from("W") },
                        a => { String::from(a) }
                    }
                } else {
                    c.to_string()
                }
            }).to_string();
            if self.text.ne(&old_text) {
                self.can_be_oi_to_wa = false;
                self.can_be_uwued = false;
            }
            self.can_be_r_to_w = false;
        }
        self
    }

    fn oi_to_wa(&mut self) -> &mut Self {
        if self.can_be_oi_to_wa {
            let old_text = self.text.clone();
            self.text = RE_OI_TO_WA.replace_all(&self.text, |cap: &Captures| {
                let c1 = cap.get(1).unwrap().as_str();
                let c2 = cap.get(2).unwrap().as_str();
                (match c1 {
                    "Qu" | "QU" => { String::from("Q") },
                    "qU" | "qu" => { String::from("q") },
                    a => { String::from(a) }
                } +
                match c2 {
                    "OI" => "WA",
                    "Oi" => "Wa",
                    "oi" => "wa",
                    a => a
                })
            }).to_string();
            if self.text.ne(&old_text) {
                self.can_be_uwued = false;
                self.can_be_oi_to_wa = false;
            }
            self.can_be_oi_to_wa = false;
        }
        self
    }

    fn stutter(&mut self) -> &mut Self {
        if self.can_be_stuttered {
            let mut rng = thread_rng();
            self.text = RE_STUTTER.replace_all(&self.text, |cap: &Captures| {
                let c = cap.get(1).unwrap().as_str();
                if rng.gen_bool(STUTTER_PROBA) {
                    c.to_string() + "-" + c
                } else {
                    c.to_string()
                }
            }).to_string();
            self.can_be_stuttered = false;
        }
        self
    }

    fn nyaifier(&mut self) -> &mut Self {
        if self.can_be_nyaied {
            let mut rng = thread_rng();
            self.text = RE_NYA.replace_all(&self.text, |cap: &Captures| {
                let n = cap.get(1).unwrap().as_str();
                let c = cap.get(2).unwrap().as_str();
                if rng.gen_bool(NYA_PROBA) {
                    n.to_string() + match c {
                        "A" | "I" | "O" | "U" => "Y",
                        _ => "y"
                    } + c
                } else {
                    n.to_string() + c
                }
            }).to_string();
            self.can_be_nyaied = false;
        }
        self
    }

    fn oui_replacer(&mut self) -> &mut Self {
        if self.can_be_oui {
            let old_text = self.text.clone();
            self.text = RE_OUI.replace_all(&self.text, |cap: &Captures| {
                let c1 = cap.get(1).unwrap().as_str();
                match &c1[1..] {
                    "UI" | "I" | "UP" | "EP" => "VWI",
                    _ => {
                        match c1 {
                            "O" | "U" | "Y" => "Vwi",
                            _ => "vwi"
                        }
                    }
                }
            }).to_string();
            if self.text.ne(&old_text) {
                self.can_be_uwued = false;
            }
            self.can_be_oui = false;
        }
        self
    }

    fn non_replacer(&mut self) -> &mut Self {
        if self.can_be_non {
            let old_text = self.text.clone();
            self.text = RE_NON.replace_all(&self.text, |cap: &Captures| {
                let c1 = cap.get(1).unwrap().as_str();
                let c2 = cap.get(2).unwrap().as_str();
                match c2 {
                    "ON" | "OPE" | "AN" => "NYON",
                    _ => {
                        match c1 {
                            "N" => "Nyon",
                            _ => "nyon"
                        }
                    }
                }
            }).to_string();
            if self.text.ne(&old_text) {
                self.can_be_uwued = false;
            }
            self.can_be_non = false;
        }
        self
    }
}

impl PoncToken {
    fn replace_open_ponct(&mut self) -> &mut Self {
        if self.can_be_open {
            let open_ponct = "。・:*:・゚★₊。・:*:・゚☆ ";
            let old_text = self.text.clone();
            self.text = RE_OPEN.replace_all(&self.text, open_ponct).to_string();
            if self.text.ne(&old_text) {
                self.can_be_close = false;
                self.can_be_face = false;
                self.can_be_uh = false;
            }
            self.can_be_open = false;
        }
        self
    }

    fn replace_close_ponct(&mut self) -> &mut Self {
        if self.can_be_close {
            let close_ponct = " ☆゚・:*:・。₊★゚・:*:・。";
            let old_text = self.text.clone();
            self.text = RE_CLOSE.replace_all(&self.text, close_ponct).to_string();
            if self.text.ne(&old_text) {
                self.can_be_open = false;
                self.can_be_face = false;
                self.can_be_uh = false;
            }
            self.can_be_close = false;
        }
        self
    }

    fn replace_face_ponct(&mut self) -> &mut Self {
        if self.can_be_face {
            let mut rng = thread_rng();
            let faces = vec![
                " (・`ω´・)",
                // ";;w;;",
                " owo",
                " UwU",
                " >w<",
                " ^w^",
                " (* ^ ω ^)",
                " (⌒ω⌒)",
                " ヽ(*・ω・)ノ",
                " (○´∀`○)",
                " (○・ω・○)",
                " \(^▽^)/",
                " nya~",
                " :3",
                "~"
            ];
            let old_text = self.text.clone();
            self.text = RE_FACE.replace_all(&self.text, |_: &Captures| {
                String::from(faces[rng.gen_range(0..faces.len())])
            }).to_string();
            if self.text.ne(&old_text) {
                self.can_be_open = false;
                self.can_be_close = false;
                self.can_be_uh = false;
            }
            self.can_be_face = false;
        }
        self
    }

    fn uh_injector (&mut self) -> &mut Self {
        if self.can_be_uh {
            let mut rng = thread_rng();
            let old_text = self.text.clone();
            self.text = RE_UH.replace_all(&self.text, |_: &Captures| {
                if rng.gen_bool(UH_PROBA) {
                    " uh... "
                } else {
                    " "
                }
            }).to_string();
            if self.text.ne(&old_text) {
                self.can_be_open = false;
                self.can_be_close = false;
                self.can_be_face = false;
            }
            self.can_be_uh = false;
        }
        self
    }
}

pub fn uwuify(txt: String) -> String {
    let mut text = txt;
    println!("{:?}", text);
    text = replace_open_ponct(text);
    text = replace_close_ponct(text);
    text = replace_face_ponct(text);
    text = oui_replacer(text);
    text = non_replacer(text);
    text = uwu_injector(text);
    text = stutter(text);
    text = nyaifier(text);

    return text
    let text = txt;
    let mut a = Message::parse(&text);
    return a.uwuify().to_string();
}
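The rewrite above replaces the old whole-string regex passes with a token pipeline: Message::parse uses unicode-segmentation to split the input into word and punctuation tokens, each token carries can_be_* flags so the transforms do not stack on the same token, and the precompiled lazy_static regexes are shared across tokens. A minimal sketch of just the tokenization step, outside the bot (the input string is illustrative):

    use unicode_segmentation::UnicodeSegmentation;

    // Mirror of Message::parse's classification: split on word boundaries and
    // tag each segment as a word or as punctuation/whitespace.
    fn main() {
        let input = "Bonjour, (oui) non !";
        let words: Vec<&str> = input.unicode_words().collect();
        for segment in input.split_word_bounds() {
            let kind = if words.contains(&segment) { "word" } else { "punct/space" };
            println!("{:?} -> {}", segment, kind);
        }
    }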