diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index bf8307a..de06ebc 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -91,7 +91,7 @@ jobs:
 
       # Generate a changelog for the new release using Git
       - name: Generate a changelog
-        uses: orhun/git-cliff-action@v4.4.1
+        uses: orhun/git-cliff-action@v4.4.2
        id: git-cliff
        with:
          config: cliff.toml # The configuration file for git-cliff
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 556a3ee..78227d6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,29 @@
 
 All notable changes to this project will be documented in this file.
 
+## [1.1.0-alpha.36] - 2024-12-07
+
+### Features
+
+- Add `OpenGraphMeta` struct for HTML meta tags
+
+### Bug Fixes
+
+- Correct typo in `farcaster_meta` HTML tag
+
+### Refactor
+
+- Remove `handle_mini_app` function
+- Rename `routes` module to `handlers`
+- Rename `utils` module to `helpers`
+- Simplify SQID handling logic
+- Rename `handle_og_image` to `generate_og_image`
+- Rename functions for clarity
+
+### Styling
+
+- Remove redundant comments
+
 ## [1.1.0-alpha.35] - 2024-12-04
 
 ### Bug Fixes
diff --git a/Cargo.lock b/Cargo.lock
index 219af97..6198787 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -794,7 +794,7 @@ checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
 
 [[package]]
 name = "lilnouns-click"
-version = "1.1.0-alpha.35"
+version = "1.1.0-alpha.36"
 dependencies = [
  "graphql_client",
  "html-escape",
diff --git a/Cargo.toml b/Cargo.toml
index a4a099e..6d9a461 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,12 +1,13 @@
 [package]
 name = "lilnouns-click"
-version = "1.1.0-alpha.35"
+version = "1.1.0-alpha.36"
 authors = ["Milad Nekofar "]
 edition = "2021"
 description = "A Nounish URL shortener for LilNouns DAO."
repository = "https://github.com/lilnouns/lilnouns-click" license = "Apache-2.0" include = ["*.graphql"] +rust-version = "1.83.0" # https://github.com/rustwasm/wasm-pack/issues/1247 [package.metadata.wasm-pack.profile.release] diff --git a/package.json b/package.json index 214274b..e8f5329 100644 --- a/package.json +++ b/package.json @@ -1,14 +1,15 @@ { - "name": "lilnouns-click", - "private": true, - "scripts": { - "deploy": "wrangler deploy --env dev", - "dev": "wrangler dev" - }, - "devDependencies": { - "wrangler": "3.92.0" - }, + "name": "lilnouns-click", + "private": true, + "scripts": { + "deploy": "wrangler deploy --env dev", + "dev": "wrangler dev" + }, + "devDependencies": { + "wrangler": "3.92.0" + }, "resolutions": { - "undici": ">=5.28.4" + "undici": ">=5.28.4", + "cookie": ">=0.7.0" } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1078268..05db0ec 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -6,6 +6,7 @@ settings: overrides: undici: '>=5.28.4' + cookie: '>=0.7.0' importers: @@ -239,9 +240,9 @@ packages: resolution: {integrity: sha512-n8enUVCED/KVRQlab1hr3MVpcVMvxtZjmEa956u+4YijlmQED223XMSYj2tLuKvr4jcCTzNNMpQDUer72MMmzA==} engines: {node: '>= 14.16.0'} - cookie@0.5.0: - resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==} - engines: {node: '>= 0.6'} + cookie@1.0.1: + resolution: {integrity: sha512-Xd8lFX4LM9QEEwxQpF9J9NTUh8pmdJO0cyRJhFiDoLTk2eH8FXlRv2IFGYVadZpqI3j8fhNrSdKCeYPxiAhLXw==} + engines: {node: '>=18'} data-uri-to-buffer@2.0.2: resolution: {integrity: sha512-ND9qDTLc6diwj+Xe5cdAgVTbLVdXbtxTJRXRhli8Mowuaan+0EJOtdqJ0QCHNSSPyoXGx9HX2/VMnKeC34AChA==} @@ -584,7 +585,7 @@ snapshots: dependencies: readdirp: 4.0.2 - cookie@0.5.0: {} + cookie@1.0.1: {} data-uri-to-buffer@2.0.2: {} @@ -788,7 +789,7 @@ snapshots: youch@3.3.3: dependencies: - cookie: 0.5.0 + cookie: 1.0.1 mustache: 4.2.0 stacktracey: 2.1.8 diff --git a/src/handlers.rs b/src/handlers.rs new file mode 100644 index 0000000..903d594 --- /dev/null +++ b/src/handlers.rs @@ -0,0 +1,349 @@ +use html_escape::encode_safe; +use html_minifier::minify; +use serde::{Deserialize, Serialize}; +use sqids::Sqids; +use url::Url; +use worker::{Error, Request, Response, ResponseBody, Result, RouteContext}; + +use crate::{ + handlers::{ + Community::LilNouns, + Platform::{Ethereum, LilCamp, MetaGov, PropLot}, + }, + helpers::create_og_image, + queries::{fetch_lil_nouns_data, fetch_meta_gov_data, fetch_prop_lot_data}, +}; + +#[derive(Debug, Serialize, Deserialize)] +struct UrlPayload { + pub url: String, + pub sqid: Option, +} + +#[derive(Debug, PartialEq)] +pub enum Community { + LilNouns = 1, +} + +impl Community { + fn from_id(id: u64) -> Option { + match id { + 1 => Some(LilNouns), + _ => None, + } + } +} + +#[derive(Debug, PartialEq)] +pub enum Platform { + Ethereum = 1, + PropLot = 2, + MetaGov = 3, + LilCamp = 4, +} + +impl Platform { + fn from_id(id: u64) -> Option { + match id { + 1 => Some(Ethereum), + 2 => Some(PropLot), + 3 => Some(MetaGov), + 4 => Some(LilCamp), + _ => None, + } + } +} + +struct DecodedSqid { + community: Community, + platform: Platform, + id: u64, +} + +fn decode_sqid(sqid: &str) -> Option { + let sqids = Sqids::default(); + let numbers = sqids.decode(sqid); + + if numbers.len() < 3 { + return None; + } + + let community = Community::from_id(numbers[0])?; + let platform = Platform::from_id(numbers[1])?; + let id = numbers[2]; + + Some(DecodedSqid { + community, + platform, + id, + }) +} + +fn build_url(base_url: &str, id: 
u64, campaign: &str) -> String { + format!( + "{}/{}?utm_source=farcaster&utm_medium=social&utm_campaign={}&utm_content=proposal_{}", + base_url, id, campaign, id + ) +} + +fn build_image_url(url: &Url, sqid: &str) -> String { + url + .join(&format!("{}/og.png", sqid)) + .map_or_else(|_| String::new(), |full_url| full_url.to_string()) +} + +struct OpenGraphMeta { + title: String, + description: String, + image: String, + url: String, +} + +impl OpenGraphMeta { + fn to_html(&self) -> String { + let farcaster_meta = format!( + r#" + + + "#, + image = self.image, + ); + + format!( + r#" + + + + + + + + + + + {farcaster_meta} + "#, + url = self.url, + title = encode_safe(&self.title), + description = encode_safe(&self.description), + image = self.image, + farcaster_meta = farcaster_meta, + ) + } +} + +pub async fn generate_redirect_page(req: Request, ctx: RouteContext) -> Result { + if let Some(sqid) = ctx.param("sqid") { + let ga_id = ctx.secret("GA_ID")?; + let decoded = decode_sqid(&sqid).ok_or_else(|| Error::from("Invalid SQID"))?; + + let (url, title, description, image) = match (decoded.community, decoded.platform) { + (LilNouns, Ethereum) => { + let url = build_url("https://lilnouns.wtf/vote", decoded.id, "governor"); + let (title, description) = fetch_lil_nouns_data(&ctx.env, decoded.id).await?; + let image = build_image_url(&req.url()?, &sqid); + (url, title, description, image) + } + (LilNouns, PropLot) => { + let url = build_url("https://lilnouns.proplot.wtf/idea", decoded.id, "proplot"); + let (title, description) = fetch_prop_lot_data(&ctx.env, decoded.id).await?; + let image = build_image_url(&req.url()?, &sqid); + (url, title, description, image) + } + (LilNouns, MetaGov) => { + let url = build_url("https://lilnouns.wtf/vote/nounsdao", decoded.id, "metagov"); + let (title, description) = fetch_meta_gov_data(&ctx.env, decoded.id).await?; + let image = build_image_url(&req.url()?, &sqid); + (url, title, description, image) + } + (LilNouns, LilCamp) => { + let url = build_url("https://lilnouns.camp/proposals", decoded.id, "governor"); + let (title, description) = fetch_lil_nouns_data(&ctx.env, decoded.id).await?; + let image = build_image_url(&req.url()?, &sqid); + (url, title, description, image) + } + }; + + let og_meta = OpenGraphMeta { + title: title.clone(), + description: description.clone(), + image, + url: url.clone(), + }; + + let html_doc = format!( + r#" + + + + + + {og_meta} + + + {title} + + + + + + + + + + + +
+
+ Loading Skull +

Redirecting...

+

{title}

+
+
+ + + "#, + og_meta = og_meta.to_html(), + url = url, + title = encode_safe(&title), + description = encode_safe(&description), + ga_id = ga_id, + ); + + let minified_html = minify(html_doc).expect("Failed to minify HTML"); + + let response = Response::from_body(ResponseBody::Body(minified_html.as_bytes().to_vec())); + + return match response { + Ok(mut res) => { + res.headers_mut().set("Content-Type", "text/html")?; + return Ok(res); + } + Err(e) => Err(e), + }; + } + + Response::error("Bad Request", 400) +} + +pub async fn generate_og_image_url(_req: Request, ctx: RouteContext) -> Result { + if let Some(sqid) = ctx.param("sqid") { + let decoded = decode_sqid(&sqid).ok_or_else(|| Error::from("Invalid SQID"))?; + + let image = match (decoded.community, decoded.platform) { + (LilNouns, Ethereum) => { + let (title, description) = fetch_lil_nouns_data(&ctx.env, decoded.id).await?; + create_og_image(decoded.id, &title.to_uppercase(), &description, Ethereum) + } + (LilNouns, PropLot) => { + let (title, description) = fetch_prop_lot_data(&ctx.env, decoded.id).await?; + create_og_image(decoded.id, &title.to_uppercase(), &description, PropLot) + } + (LilNouns, MetaGov) => { + let (title, description) = fetch_meta_gov_data(&ctx.env, decoded.id).await?; + create_og_image(decoded.id, &title.to_uppercase(), &description, MetaGov) + } + (LilNouns, LilCamp) => { + let (title, description) = fetch_lil_nouns_data(&ctx.env, decoded.id).await?; + create_og_image(decoded.id, &title.to_uppercase(), &description, LilCamp) + } + }; + + return Response::redirect(Url::parse(&*image)?); + } + + Response::error("Bad Request", 400) +} + +pub async fn generate_from_url(mut req: Request, _ctx: RouteContext) -> Result { + let sqids = Sqids::default(); + let mut numbers: Vec = Vec::new(); + + if let Ok(payload) = req.json::().await { + let url = Url::parse(&*payload.url).expect("Invalid URL"); + + return match url.host_str() { + Some("lilnouns.wtf") | Some("www.lilnouns.wtf") => { + let segments: Vec<_> = url + .path_segments() + .expect("Cannot get path segments") + .filter(|segment| !segment.is_empty()) + .collect(); + + if segments.is_empty() || segments[0] != "vote" { + return Response::error("Bad Request", 400); + } + + if segments[1] == "nounsdao" { + numbers.push(LilNouns as u64); + numbers.push(MetaGov as u64); + numbers.push(segments[2].parse::().unwrap().try_into()?); + } else { + numbers.push(LilNouns as u64); + numbers.push(Ethereum as u64); + numbers.push(segments[1].parse::().unwrap().try_into()?); + } + + Response::from_json(&UrlPayload { + url: url.into(), + sqid: Some(sqids.encode(&*numbers).unwrap()), + }) + } + Some("lilnouns.proplot.wtf") | Some("www.lilnouns.proplot.wtf") => { + numbers.push(LilNouns as u64); + + let segments: Vec<_> = url + .path_segments() + .expect("Cannot get path segments") + .filter(|segment| !segment.is_empty()) + .collect(); + + if segments[0] == "idea" { + numbers.push(PropLot as u64); + numbers.push(segments[1].parse::().unwrap().try_into()?); + } else { + return Response::error("Bad Request", 400); + } + + Response::from_json(&UrlPayload { + url: url.into(), + sqid: Some(sqids.encode(&*numbers).unwrap()), + }) + } + Some("lilnouns.camp") | Some("www.lilnouns.camp") => { + numbers.push(LilNouns as u64); + + let segments: Vec<_> = url + .path_segments() + .expect("Cannot get path segments") + .filter(|segment| !segment.is_empty()) + .collect(); + + if segments.is_empty() || segments[0] != "proposals" { + return Response::error("Bad Request", 400); + } + + if segments[0] == 
"proposals" { + numbers.push(LilCamp as u64); + numbers.push(segments[1].parse::().unwrap().try_into()?); + } else { + return Response::error("Bad Request", 400); + } + + Response::from_json(&UrlPayload { + url: url.into(), + sqid: Some(sqids.encode(&*numbers).unwrap()), + }) + } + _ => Response::error("Bad Request", 400), + }; + } + + Response::error("Bad Request", 400) +} diff --git a/src/utils.rs b/src/helpers.rs similarity index 99% rename from src/utils.rs rename to src/helpers.rs index b474eac..8108882 100644 --- a/src/utils.rs +++ b/src/helpers.rs @@ -2,7 +2,7 @@ use percent_encoding::{utf8_percent_encode, NON_ALPHANUMERIC}; use regex::Regex; use unidecode::unidecode; -use crate::routes::Platform; +use crate::handlers::Platform; pub fn truncate_and_clean_string(input: &str, limit: usize) -> String { // Check if the first line contains "#" diff --git a/src/lib.rs b/src/lib.rs index f7ef0e4..c9456c5 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,24 +1,21 @@ use url::Url; use worker::{event, Context, Env, Request, Response, Result, Router}; +mod handlers; +mod helpers; mod queries; -mod routes; -mod utils; #[event(fetch)] async fn main(req: Request, env: Env, _ctx: Context) -> Result { - let router = Router::new(); - - router + Router::new() .get("/", |_, _| { Response::redirect(Url::parse( "https://lilnouns.camp?utm_source=farcaster&utm_medium=social", )?) }) - .get_async("/:sqid", routes::handle_redirect) - .get_async("/:sqid/og.png", routes::handle_og_image) - .post_async("/", routes::handle_creation) - .on_async("/app/:sqid", routes::handle_mini_app) + .get_async("/:sqid", handlers::generate_redirect_page) + .get_async("/:sqid/og.png", handlers::generate_og_image_url) + .post_async("/", handlers::generate_from_url) .run(req, env) .await } diff --git a/src/queries.rs b/src/queries.rs index 7d34861..6ee8fa3 100644 --- a/src/queries.rs +++ b/src/queries.rs @@ -3,7 +3,7 @@ use log::{debug, error}; use reqwest::Client; use worker::{Env, Result}; -use crate::utils::truncate_and_clean_string; +use crate::helpers::truncate_and_clean_string; #[derive(GraphQLQuery)] #[graphql( diff --git a/src/routes.rs b/src/routes.rs deleted file mode 100644 index c0ef401..0000000 --- a/src/routes.rs +++ /dev/null @@ -1,451 +0,0 @@ -use html_escape::encode_safe; -use html_minifier::minify; -use percent_encoding::{percent_encode, utf8_percent_encode, NON_ALPHANUMERIC}; -use serde::{Deserialize, Serialize}; -use serde_json::json; -use sqids::Sqids; -use url::Url; -use worker::{Headers, Method, Request, Response, ResponseBody, RouteContext}; - -use crate::{ - queries::{fetch_lil_nouns_data, fetch_meta_gov_data, fetch_prop_lot_data}, - routes::{ - Community::LilNouns, - Platform::{Ethereum, LilCamp, MetaGov, PropLot}, - }, - utils::create_og_image, -}; - -#[derive(Debug, Serialize, Deserialize)] -struct UrlPayload { - pub url: String, - pub sqid: Option, -} - -#[derive(Debug, PartialEq)] -pub enum Community { - LilNouns = 1, -} - -#[derive(Debug, PartialEq)] -pub enum Platform { - Ethereum = 1, - PropLot = 2, - MetaGov = 3, - LilCamp = 4, -} - -pub async fn handle_redirect(req: Request, ctx: RouteContext) -> worker::Result { - if let Some(sqid) = ctx.param("sqid") { - let ga_id = ctx.secret("GA_ID").unwrap(); - let sqids = Sqids::default(); - let numbers = sqids.decode(&sqid); - - let community = match numbers[0] { - 1 => Some(LilNouns), - _ => None, - }; - - let platform = match numbers[1] { - 1 => Some(Ethereum), - 2 => Some(PropLot), - 3 => Some(MetaGov), - 4 => Some(LilCamp), - _ => None, - }; - - let 
(url, title, description, image) = match (community, platform) { - (Some(LilNouns), Some(Ethereum)) => { - let url = format!( - "{}/{}?utm_source=farcaster&utm_medium=social&utm_campaign=governor&\ - utm_content=proposal_{}", - "https://lilnouns.wtf/vote", numbers[2], numbers[2] - ); - let (title, description) = fetch_lil_nouns_data(&ctx.env, numbers[2]).await?; - let image = req - .url() - .unwrap() - .join(format!("{}/og.png", sqid).as_str()) - .unwrap() - .to_string(); - (url, title, description, image) - } - (Some(LilNouns), Some(PropLot)) => { - let url = format!( - "{}/{}?utm_source=farcaster&utm_medium=social&utm_campaign=proplot&utm_content=idea_{}", - "https://lilnouns.proplot.wtf/idea", numbers[2], numbers[2] - ); - let (title, description) = fetch_prop_lot_data(&ctx.env, numbers[2]).await?; - let image = req - .url() - .unwrap() - .join(format!("{}/og.png", sqid).as_str()) - .unwrap() - .to_string(); - (url, title, description, image) - } - (Some(LilNouns), Some(MetaGov)) => { - let url = format!( - "{}/{}?utm_source=farcaster&utm_medium=social&utm_campaign=metagov&\ - utm_content=proposal_{}", - "https://lilnouns.wtf/vote/nounsdao", numbers[2], numbers[2] - ); - let (title, description) = fetch_meta_gov_data(&ctx.env, numbers[2]).await?; - let image = req - .url() - .unwrap() - .join(format!("{}/og.png", sqid).as_str()) - .unwrap() - .to_string(); - (url, title, description, image) - } - (Some(LilNouns), Some(LilCamp)) => { - let url = format!( - "{}/{}?utm_source=farcaster&utm_medium=social&utm_campaign=governor&\ - utm_content=proposal_{}", - "https://lilnouns.camp/proposals", numbers[2], numbers[2] - ); - let (title, description) = fetch_lil_nouns_data(&ctx.env, numbers[2]).await?; - let image = req - .url() - .unwrap() - .join(format!("{}/og.png", sqid).as_str()) - .unwrap() - .to_string(); - (url, title, description, image) - } - _ => (String::new(), String::new(), String::new(), String::new()), - }; - - let mini_app_url = req - .url() - .unwrap() - .as_str() - .replace(sqid, format!("app/{}", sqid).as_str()); - - let mini_app_url = utf8_percent_encode(&mini_app_url, NON_ALPHANUMERIC).to_string(); - - let html_doc = format!( - r#" - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - {} - - - - - - - - - - - -
-
- Loading Skull -

Redirecting...

-

{}

-
-
- - - "#, - url, // OpenGraph URL - encode_safe(&title), // OpenGraph Title - encode_safe(&description), // OpenGraph Description - image, // OpenGraph Image URL - image, // OpenGraph Image Secure URL - encode_safe(&title), // OpenGraph Image Alt - url, // Twitter URL - encode_safe(&title), // Twitter Title - encode_safe(&description), // Twitter Description - image, // Twitter Image - image, // Farcaster Image - "Read", // Farcaster Button #1 - mini_app_url, // Farcaster Composer URL - mini_app_url, // Farcaster Post URL - url, // Page Refresh URL - encode_safe(&title), // Page Title - encode_safe(&description), // Page Description - ga_id, // Google Analytics ID - ga_id, // Google Analytics ID - url, // Page Content Link URL - encode_safe(&title), // Page Content Link Title - ); - - let minified_html = minify(html_doc).expect("Failed to minify HTML"); - - let response = Response::from_body(ResponseBody::Body(minified_html.as_bytes().to_vec())); - - return match response { - Ok(mut res) => { - res.headers_mut().set("Content-Type", "text/html").unwrap(); - return Ok(res); - } - Err(e) => Err(e), - }; - } - - Response::error("Bad Request", 400) -} - -pub async fn handle_og_image(_req: Request, ctx: RouteContext) -> worker::Result { - if let Some(sqid) = ctx.param("sqid") { - let sqids = Sqids::default(); - let numbers = sqids.decode(&sqid); - - let community = match numbers[0] { - 1 => Some(LilNouns), - _ => None, - }; - - let platform = match numbers[1] { - 1 => Some(Ethereum), - 2 => Some(PropLot), - 3 => Some(MetaGov), - 4 => Some(LilCamp), - _ => None, - }; - - let image = match (community, platform) { - (Some(LilNouns), Some(Ethereum)) => { - let (title, description) = fetch_lil_nouns_data(&ctx.env, numbers[2]).await?; - create_og_image(numbers[2], &title.to_uppercase(), &description, Ethereum) - } - (Some(LilNouns), Some(PropLot)) => { - let (title, description) = fetch_prop_lot_data(&ctx.env, numbers[2]).await?; - create_og_image(numbers[2], &title.to_uppercase(), &description, PropLot) - } - (Some(LilNouns), Some(MetaGov)) => { - let (title, description) = fetch_meta_gov_data(&ctx.env, numbers[2]).await?; - create_og_image(numbers[2], &title.to_uppercase(), &description, MetaGov) - } - (Some(LilNouns), Some(LilCamp)) => { - let (title, description) = fetch_lil_nouns_data(&ctx.env, numbers[2]).await?; - create_og_image(numbers[2], &title.to_uppercase(), &description, LilCamp) - } - _ => String::new(), - }; - - return Response::redirect(Url::parse(&*image)?); - } - - Response::error("Bad Request", 400) -} - -pub async fn handle_creation( - mut req: Request, - _ctx: RouteContext, -) -> worker::Result { - let sqids = Sqids::default(); - let mut numbers: Vec = Vec::new(); - - if let Ok(payload) = req.json::().await { - let url = Url::parse(&*payload.url).expect("Invalid URL"); - - return match url.host_str() { - Some("lilnouns.wtf") | Some("www.lilnouns.wtf") => { - let segments: Vec<_> = url - .path_segments() - .expect("Cannot get path segments") - .filter(|segment| !segment.is_empty()) - .collect(); - - if segments.is_empty() || segments[0] != "vote" { - return Response::error("Bad Request", 400); - } - - if segments[1] == "nounsdao" { - numbers.push(LilNouns as u64); - numbers.push(MetaGov as u64); - numbers.push(segments[2].parse::().unwrap().try_into().unwrap()); - } else { - numbers.push(LilNouns as u64); - numbers.push(Ethereum as u64); - numbers.push(segments[1].parse::().unwrap().try_into().unwrap()); - } - - Response::from_json(&UrlPayload { - url: url.into(), - sqid: 
Some(sqids.encode(&*numbers).unwrap()), - }) - } - Some("lilnouns.proplot.wtf") | Some("www.lilnouns.proplot.wtf") => { - numbers.push(LilNouns as u64); - - let segments: Vec<_> = url - .path_segments() - .expect("Cannot get path segments") - .filter(|segment| !segment.is_empty()) - .collect(); - - if segments[0] == "idea" { - numbers.push(PropLot as u64); - numbers.push(segments[1].parse::().unwrap().try_into().unwrap()); - } else { - return Response::error("Bad Request", 400); - } - - Response::from_json(&UrlPayload { - url: url.into(), - sqid: Some(sqids.encode(&*numbers).unwrap()), - }) - } - Some("lilnouns.camp") | Some("www.lilnouns.camp") => { - numbers.push(LilNouns as u64); - - let segments: Vec<_> = url - .path_segments() - .expect("Cannot get path segments") - .filter(|segment| !segment.is_empty()) - .collect(); - - if segments.is_empty() || segments[0] != "proposals" { - return Response::error("Bad Request", 400); - } - - if segments[0] == "proposals" { - numbers.push(LilCamp as u64); - numbers.push(segments[1].parse::().unwrap().try_into().unwrap()); - } else { - return Response::error("Bad Request", 400); - } - - Response::from_json(&UrlPayload { - url: url.into(), - sqid: Some(sqids.encode(&*numbers).unwrap()), - }) - } - _ => Response::error("Bad Request", 400), - }; - } - - Response::error("Bad Request", 400) -} - -pub async fn handle_mini_app(req: Request, ctx: RouteContext) -> worker::Result { - if let Some(sqid) = ctx.param("sqid") { - let sqids = Sqids::default(); - let numbers = sqids.decode(&sqid); - - let community = match numbers[0] { - 1 => Some(LilNouns), - _ => None, - }; - - let platform = match numbers[1] { - 1 => Some(Ethereum), - 2 => Some(PropLot), - 3 => Some(MetaGov), - 4 => Some(LilCamp), - _ => None, - }; - - let url = match (community, platform) { - (Some(LilNouns), Some(Ethereum)) => { - format!( - "{}/{}?utm_source=farcaster&utm_medium=social&utm_campaign=governor&\ - utm_content=proposal_{}", - "https://lilnouns.wtf/vote", numbers[2], numbers[2] - ) - } - (Some(LilNouns), Some(PropLot)) => { - format!( - "{}/{}?utm_source=farcaster&utm_medium=social&utm_campaign=proplot&utm_content=idea_{}", - "https://lilnouns.proplot.wtf/idea", numbers[2], numbers[2] - ) - } - (Some(LilNouns), Some(MetaGov)) => { - format!( - "{}/{}?utm_source=farcaster&utm_medium=social&utm_campaign=metagov&\ - utm_content=proposal_{}", - "https://lilnouns.wtf/vote/nounsdao", numbers[2], numbers[2] - ) - } - (Some(LilNouns), Some(LilCamp)) => { - format!( - "{}/{}?utm_source=farcaster&utm_medium=social&utm_campaign=governor&\ - utm_content=proposal_{}", - "https://lilnouns.camp/proposals", numbers[2], numbers[2] - ) - } - _ => String::new(), - }; - - match req.method() { - Method::Get => { - let json_response = json!({ - "aboutUrl": "https://lilnouns.click", - "action": { - "type": "post" - }, - "description": "Just like Nouns!", - "icon": "book", - "imageUrl": "https://i.imgur.com/DgSx9mw.png", - "name": "Lil Nouns", - "type": "composer" - }); - - let mut headers = Headers::new(); - headers.set("Content-Type", "application/json").unwrap(); - - return Response::from_json(&json_response); - } - Method::Post => { - let json_response = json!({ - "url": url, - "title": "Lil Nouns", - "type": "form" - }); - - let mut headers = Headers::new(); - headers.set("Content-Type", "application/json").unwrap(); - - return Response::from_json(&json_response); - } - _ => return Response::error("Bad Request", 400), - } - } - - Response::error("Bad Request", 400) -} diff --git a/wrangler.toml 
b/wrangler.toml
index 8efe0cf..794ef4d 100644
--- a/wrangler.toml
+++ b/wrangler.toml
@@ -1,6 +1,6 @@
 name = "lilnouns-click"
 main = "build/worker/shim.mjs"
-compatibility_date = "2023-03-22"
+compatibility_date = "2024-09-22"
 
 [build]
 command = "cargo install -q -f worker-build && worker-build --release"