Mirror of https://framagit.org/veretcle/scootaloo.git
I consider this a good compromise between making everything async (which works, but is fairly complex and honestly a bit pointless here) and making nothing async, which is not the most efficient way to do things either, since reqwest and egg-mode are async by nature.
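
Concretely, the compromise looks roughly like the sketch below: a single #[tokio::main] entry point awaits the handful of genuinely async calls (reqwest, egg-mode) directly, while the surrounding logic stays synchronous. This is only an illustrative sketch assuming the tokio and reqwest crates; fetch_media, pick_url and the example URL are made-up placeholders, not code from this repository.

// Illustrative sketch only, not part of this commit: one tokio entry point
// drives the few async calls while the rest of the code stays synchronous.
use std::error::Error;

// Hypothetical async helper: only the network call itself is async.
async fn fetch_media(url: &str) -> Result<Vec<u8>, Box<dyn Error>> {
    // reqwest::get is async by nature, so it is awaited directly
    // instead of being wrapped in futures::executor::block_on.
    let response = reqwest::get(url).await?;
    Ok(response.bytes().await?.to_vec())
}

// Hypothetical synchronous helper: no runtime machinery needed here.
fn pick_url() -> &'static str {
    "https://example.org/media.jpg" // placeholder URL
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    let url = pick_url();                // ordinary synchronous call
    let bytes = fetch_media(url).await?; // async call awaited at the top level
    println!("downloaded {} bytes", bytes.len());
    Ok(())
}

The runtime is created once at the outermost function, which is what the #[tokio::main] attribute added to run() in the diff below achieves.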
107  src/lib.rs
@@ -1,17 +1,13 @@
 // std
 use std::{
     path::Path,
     borrow::Cow,
     collections::HashMap,
-    io::{stdin, copy},
+    io::stdin,
     fmt,
-    fs::{read_to_string, write, create_dir_all, File, remove_file},
+    fs::{read_to_string, write},
     error::Error,
 };
 
-// futures
-use futures::executor::block_on;
-
 // toml
 use serde::Deserialize;
 
@@ -36,11 +32,20 @@ use elefren::{
 };
 
 // reqwest
-use reqwest::Client;
+use reqwest::Url;
+
+// tokio
+use tokio::{
+    io::copy,
+    fs::{File, create_dir_all, remove_file},
+};
 
 // htmlescape
 use htmlescape::decode_html;
 
+// log
+use log::{info, warn, error, debug};
+
 /**********
  * Generic usage functions
  ***********/
@@ -52,18 +57,19 @@ fn read_state(s: &str) -> Option<u64> {
     let state = read_to_string(s);
 
     if let Ok(s) = state {
+        debug!("Last Tweet ID (from file): {}", &s);
         return s.parse::<u64>().ok();
     }
 
     None
 }
 
-/// Write last treated tweet id to a file
+/// Writes last treated tweet id to a file
 fn write_state(f: &str, s: u64) -> Result<(), std::io::Error> {
     write(f, format!("{}", s))
 }
 
-/// Get twitter oauth2 token
+/// Gets twitter oauth2 token
 fn get_oauth2_token(config: &Config) -> Token {
     let con_token = KeyPair::new(String::from(&config.twitter.consumer_key), String::from(&config.twitter.consumer_secret));
     let access_token = KeyPair::new(String::from(&config.twitter.access_key), String::from(&config.twitter.access_secret));
@@ -74,17 +80,17 @@ fn get_oauth2_token(config: &Config) -> Token {
     }
 }
 
-/// Get twitter user timeline
-fn get_user_timeline(config: &Config, token: Token, lid: Option<u64>) -> Result<Vec<Tweet>, Box<dyn Error>> {
+/// Gets twitter user timeline
+async fn get_user_timeline(config: &Config, token: Token, lid: Option<u64>) -> Result<Vec<Tweet>, Box<dyn Error>> {
     // fix the page size to 200 as it is the maximum Twitter authorizes
-    let (_timeline, feed) = block_on(user_timeline(UserID::from(String::from(&config.twitter.username)), true, false, &token)
+    let (_timeline, feed) = user_timeline(UserID::from(String::from(&config.twitter.username)), true, false, &token)
         .with_page_size(200)
-        .older(lid))?;
+        .older(lid).await?;
 
     Ok(feed.to_vec())
 }
 
-/// decode urls from UrlEntities
+/// Decodes urls from UrlEntities
 fn decode_urls(urls: &Vec<UrlEntity>) -> HashMap<String, String> {
     let mut decoded_urls = HashMap::new();
 
@@ -98,6 +104,8 @@ fn decode_urls(urls: &Vec<UrlEntity>) -> HashMap<String, String> {
     decoded_urls
 }
 
+/// Decodes the Twitter mention to something that will make sense once Twitter has joined the
+/// Fediverse
 fn twitter_mentions(ums: &Vec<MentionEntity>) -> HashMap<String, String> {
     let mut decoded_mentions = HashMap::new();
 
@@ -108,24 +116,24 @@ fn twitter_mentions(ums: &Vec<MentionEntity>) -> HashMap<String, String> {
     decoded_mentions
 }
 
-/// Retrieve a single media from a tweet and store it in a temporary file
-fn get_tweet_media(m: &MediaEntity, t: &str) -> Result<String, Box<dyn Error>> {
+/// Retrieves a single media from a tweet and store it in a temporary file
+async fn get_tweet_media(m: &MediaEntity, t: &str) -> Result<String, Box<dyn Error>> {
     match m.media_type {
         MediaType::Photo => {
-            return cache_media(&m.media_url_https, t);
+            return cache_media(&m.media_url_https, t).await;
         },
         _ => {
             match &m.video_info {
                 Some(v) => {
                     for variant in &v.variants {
                         if variant.content_type == "video/mp4" {
-                            return cache_media(&variant.url, t);
+                            return cache_media(&variant.url, t).await;
                         }
                     }
-                    return Err(Box::new(ScootalooError::new(format!("Media Type for {} is video but no mp4 file URL is available", &m.url).as_str())));
+                    return Err(ScootalooError::new(&format!("Media Type for {} is video but no mp4 file URL is available", &m.url)).into());
                 },
                 None => {
-                    return Err(Box::new(ScootalooError::new(format!("Media Type for {} is video but does not contain any video_info", &m.url).as_str())));
+                    return Err(ScootalooError::new(&format!("Media Type for {} is video but does not contain any video_info", &m.url)).into());
                 },
             }
         },
@@ -135,7 +143,7 @@ fn get_tweet_media(m: &MediaEntity, t: &str) -> Result<String, Box<dyn Error>> {
 /*
  * Those functions are related to the Mastodon side of things
  */
-/// Get Mastodon Data
+/// Gets Mastodon Data
 fn get_mastodon_token(masto: &MastodonConfig) -> Mastodon {
     let data = Data {
         base: Cow::from(String::from(&masto.base)),
@@ -148,7 +156,7 @@ fn get_mastodon_token(masto: &MastodonConfig) -> Mastodon {
     Mastodon::from(data)
 }
 
-/// build toot text from tweet
+/// Builds toot text from tweet
 fn build_basic_status(tweet: &Tweet) -> Result<String, Box<dyn Error>> {
     let mut toot = String::from(&tweet.text);
 
@@ -174,31 +182,26 @@ fn build_basic_status(tweet: &Tweet) -> Result<String, Box<dyn Error>> {
 /*
  * Generic private functions
  */
-fn cache_media(u: &str, t: &str) -> Result<String, Box<dyn Error>> {
+/// Gets and caches Twitter Media inside the determined temp dir
+async fn cache_media(u: &str, t: &str) -> Result<String, Box<dyn Error>> {
     // create dir
-    if !Path::new(t).is_dir() {
-        create_dir_all(t)?;
-    }
+    create_dir_all(t).await?;
 
     // get file
-    let client = Client::new();
-    let mut response = client.get(u).send()?;
+    let mut response = reqwest::get(u).await?;
 
     // create local file
-    let dest_filename = match response.url()
-        .path_segments()
-        .and_then(|segments| segments.last()) {
-            Some(r) => r,
-            None => {
-                return Err(Box::new(ScootalooError::new(format!("Cannot determine the destination filename for {}", u).as_str())));
-            },
-    };
+    let url = Url::parse(u)?;
+    let dest_filename = url.path_segments().ok_or_else(|| ScootalooError::new(&format!("Cannot determine the destination filename for {}", u)))?
+        .last().ok_or_else(|| ScootalooError::new(&format!("Cannot determine the destination filename for {}", u)))?;
 
     let dest_filepath = format!("{}/{}", t, dest_filename);
 
-    let mut dest_file = File::create(&dest_filepath)?;
+    let mut dest_file = File::create(&dest_filepath).await?;
 
-    copy(&mut response, &mut dest_file)?;
+    while let Some(chunk) = response.chunk().await? {
+        copy(&mut &*chunk, &mut dest_file).await?;
+    }
 
     Ok(dest_filepath)
 }
@@ -284,7 +287,7 @@ pub fn parse_toml(toml_file: &str) -> Config {
 
 /// Generic register function
 /// As this function is supposed to be run only once, it will panic for every error it encounters
-/// Most of this function is a direct copy/paste of the official `mammut` crate
+/// Most of this function is a direct copy/paste of the official `elefren` crate
 pub fn register(host: &str) {
     let mut builder = App::builder();
     builder.client_name(Cow::from(String::from(env!("CARGO_PKG_NAME"))))
@@ -312,7 +315,8 @@ pub fn register(host: &str) {
 }
 
 /// This is where the magic happens
-pub fn run(config: Config) {
+#[tokio::main]
+pub async fn run(config: Config) {
     // retrieve the last tweet ID for the username
     let last_tweet_id = read_state(&config.scootaloo.last_tweet_path);
 
@@ -323,13 +327,13 @@ pub fn run(config: Config) {
     let mastodon = get_mastodon_token(&config.mastodon);
 
     // get user timeline feed (Vec<tweet>)
-    let mut feed = get_user_timeline(&config, token, last_tweet_id).unwrap_or_else(|e|
+    let mut feed = get_user_timeline(&config, token, last_tweet_id).await.unwrap_or_else(|e|
         panic!("Something went wrong when trying to retrieve {}’s timeline: {}", &config.twitter.username, e)
     );
 
     // empty feed -> exiting
     if feed.is_empty() {
-        println!("Nothing to retrieve since last time, exiting…");
+        info!("Nothing to retrieve since last time, exiting…");
         return;
     }
 
@@ -338,9 +342,11 @@ pub fn run(config: Config) {
 
     for tweet in &feed {
         // determine if the tweet is part of a thread (response to self) or a standard response
+        debug!("Treating Tweet {} inside feed", tweet.id);
        if let Some(r) = &tweet.in_reply_to_screen_name {
             if &r.to_lowercase() != &config.twitter.username.to_lowercase() {
                 // we are responding not threading
+                info!("Tweet is a direct response, skipping");
                 continue;
             }
         };
@@ -349,7 +355,7 @@ pub fn run(config: Config) {
         let mut status_text = match build_basic_status(tweet) {
             Ok(t) => t,
             Err(e) => {
-                println!("Could not create status from tweet {}: {}", tweet.id ,e);
+                error!("Could not create status from tweet {}: {}", tweet.id ,e);
                 continue;
             },
         };
@@ -359,39 +365,38 @@ pub fn run(config: Config) {
         // reupload the attachments if any
         if let Some(m) = &tweet.extended_entities {
             for media in &m.media {
-                let local_tweet_media_path = match get_tweet_media(&media, &config.scootaloo.cache_path) {
+                let local_tweet_media_path = match get_tweet_media(&media, &config.scootaloo.cache_path).await {
                     Ok(m) => m,
                     Err(e) => {
-                        println!("Cannot get tweet media for {}: {}", &media.url, e);
+                        warn!("Cannot get tweet media for {}: {}", &media.url, e);
                         continue;
                     },
                 };
 
                 let mastodon_media_ids = match mastodon.media(Cow::from(String::from(&local_tweet_media_path))) {
                     Ok(m) => {
-                        remove_file(&local_tweet_media_path).unwrap_or_else(|e|
-                            println!("Attachment for {} has been upload, but I’m unable to remove the existing file: {}", &local_tweet_media_path, e)
+                        remove_file(&local_tweet_media_path).await.unwrap_or_else(|e|
+                            warn!("Attachment {} has been uploaded but I’m unable to remove the existing file: {}", &local_tweet_media_path, e)
                         );
                         m.id
                     },
                     Err(e) => {
-                        println!("Cannot attach media {} to Mastodon Instance: {}", &local_tweet_media_path, e);
+                        error!("Attachment {} cannot be uploaded to Mastodon Instance: {}", &local_tweet_media_path, e);
                         continue;
                     }
                 };
 
                 status_medias.push(mastodon_media_ids);
 
                 // last step, removing the reference to the media from with the toot’s text
                 status_text = status_text.replace(&media.url, "");
             }
         }
 
+        debug!("Building corresponding Mastodon status");
         let status = StatusBuilder::new()
             .status(&status_text)
             .media_ids(status_medias)
             .build()
-            .expect(format!("Cannot build status with text {}", &status_text).as_str());
+            .expect(&format!("Cannot build status with text {}", &status_text));
 
         // publish status
         mastodon.new_status(status).unwrap();