Initial analytics #36

BlakeRain merged 11 commits from analytics into main 2023-09-16 23:01:03 +00:00
30 changed files with 6215 additions and 14 deletions
Showing only changes of commit 9c969007c1


@@ -7,7 +7,7 @@ on:
   workflow_dispatch:

 jobs:
-  deploy:
+  deploy-site:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout the Repository
@@ -20,19 +20,14 @@ jobs:
         with:
           node-version: 18
-      - name: Configure Cache
-        uses: actions/cache@v2
-        with:
-          path: |
-            ${{ github.workspace }}/node_modules
-            **/target
-          key: ${{ runner.os }}-nextjs-${{ hashFiles('**/yarn.lock') }}
       - name: Install Rust Toolchain
         uses: dtolnay/rust-toolchain@stable
         with:
           targets: wasm32-unknown-unknown
+      - name: Setup Rust Cache
+        uses: Swatinem/rust-cache@v2
       - name: Install Trunk
         uses: jetli/trunk-action@v0.4.0
         with:
@@ -63,3 +58,49 @@ jobs:
           AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
           AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           DISTRIBUTION_ID: ${{ secrets.AWS_CLOUDFRONT_DISTRIBUTION_ID }}
+  deploy-analytics-lambda:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout the Repository
+        uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+      - name: Install the Stable Rust Toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          override: true
+      - name: Setup Rust Cache
+        uses: Swatinem/rust-cache@v2
+      - name: Install Zig Toolchain
+        uses: korandoru/setup-zig@v1
+        with:
+          zig-version: 0.10.0
+      - name: Install Cargo Lambda
+        uses: jaxxstorm/action-install-gh-release@v1.9.0
+        with:
+          repo: cargo-lambda/cargo-lambda
+      - name: Build Lambda Function
+        run: cargo lambda build --release --arm64 --output-format zip
+      - name: Configure AWS CLI
+        run: |
+          mkdir ~/.aws
+          echo "[default]" > ~/.aws/config
+          echo "credential_source = Environment" >> ~/.aws/config
+      - name: Deploy Lambda Function
+        run: |
+          aws lambda update-function-code --function-name analytics_lambda \
+            --zip-file "fileb://$(pwd)/target/lambda/analytics/bootstrap.zip" --publish
+        env:
+          AWS_DEFAULT_REGION: eu-west-1
+          AWS_ACCESS_KEY_ID: "${{ secrets.ANALYTICS_DEPLOYER_ACCESS_KEY_ID }}"
+          AWS_SECRET_ACCESS_KEY: "${{ secrets.ANALYTICS_DEPLOYER_SECRET_ACCESS_KEY }}"

Cargo.lock (generated)

@@ -520,6 +520,19 @@ dependencies = [
  "unicode-width",
 ]

+[[package]]
+name = "getrandom"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "libc",
+ "wasi 0.11.0+wasi-snapshot-preview1",
+ "wasm-bindgen",
+]
+
 [[package]]
 name = "gimli"
 version = "0.27.3"

@@ -1891,7 +1904,9 @@ dependencies = [
  "enum-iterator",
  "env_logger",
  "gloo 0.10.0",
+ "gloo-events 0.2.0",
  "include_dir",
+ "js-sys",
  "log",
  "macros",
  "model",

@@ -1902,6 +1917,7 @@ dependencies = [
  "thiserror",
  "time 0.3.26",
  "tokio",
+ "uuid",
  "wasm-bindgen",
  "wasm-bindgen-futures",
  "wasm-logger",

@@ -2295,6 +2311,17 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1"

+[[package]]
+name = "uuid"
+version = "1.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d"
+dependencies = [
+ "getrandom",
+ "serde",
+ "wasm-bindgen",
+]
+
 [[package]]
 name = "vcpkg"
 version = "0.2.15"


@@ -36,13 +36,17 @@ time = { version = "0.3", features = ["formatting"] }
 async-trait = { version = "0.1" }
 enum-iterator = { version = "1.4" }
 gloo = { version = "0.10" }
+gloo-events = { version = "0.2" }
 include_dir = { version = "0.7" }
+js-sys = { version = "0.3" }
 log = { version = "0.4" }
 reqwest = { version = "0.11", features = ["json"] }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = { version = "1.0" }
 thiserror = { version = "1.0" }
+uuid = { version = "1.4", features = ["js", "serde"] }
 wasm-bindgen = { version = "0.2" }
+wasm-bindgen-futures = { version = "0.4" }
 yew = { version = "0.20" }
 yew-hooks = { version = "0.2" }
 yew-router = { version = "0.17" }

@@ -85,11 +89,14 @@ features = [
 [dependencies.web-sys]
 version = "0.3"
 features = [
+    "Blob",
     "Document",
     "DomRect",
     "Element",
     "IntersectionObserver",
     "IntersectionObserverEntry",
+    "Navigator",
+    "Screen",
     "ScrollBehavior",
     "ScrollToOptions",
     "Window"


@@ -1,3 +1,10 @@
+[watch]
+ignore = [
+    "analytics",
+    "cf",
+    "media"
+]
+
 [[hooks]]
 stage = "pre_build"
 command = "bash"

analytics/lambda/.gitignore (new file)

@@ -0,0 +1 @@
/target

analytics/lambda/Cargo.lock (generated, new file)

File diff suppressed because it is too large


@@ -0,0 +1,37 @@
[package]
name = "analytics-lambda"
version = "0.1.0"
edition = "2021"
publish = false
[features]
local = []
[dependencies]
async-trait = { version = "0.1" }
env_logger = { version = "0.10" }
fernet = { version = "0.2" }
lambda_runtime = "0.8"
log = { version = "0.4" }
poem = { version = "1.3" }
poem-lambda = { version = "1.3" }
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0" }
time = { version = "0.3", features = ["formatting", "serde"] }
tokio = { version = "1", features = ["full"] }
toml = { version = "0.8" }
tracing = { version = "0.1" }
tracing-subscriber = { version = "0.3", features = ["std", "env-filter", "tracing-log"] }
uuid = { version = "1.2", features = ["v4", "serde"] }
analytics-model = { path = "../model" }
[dependencies.sqlx]
version = "0.7"
features = [
"migrate",
"postgres",
"runtime-tokio-rustls",
"time",
"uuid"
]


@@ -0,0 +1 @@
{"errorType":"Runtime.ExitError","errorMessage":"RequestId: e3a97fbd-4294-44aa-b77e-be2fe7d90ed3 Error: Runtime exited with error: exit status 101"}


@@ -0,0 +1,56 @@
use analytics_lambda::{
config::{load_from_env, load_from_file},
env::Env,
handlers::{
auth::{new_password, signin},
page_view::{append_page_view, record_page_view},
},
};
use analytics_model::MIGRATOR;
use lambda_runtime::Error;
use poem::{middleware, post, Endpoint, EndpointExt, Route};
async fn create() -> Result<impl Endpoint, Error> {
let config = if cfg!(feature = "local") {
load_from_file()
} else {
load_from_env().await
}?;
let env = Env::new(config).await;
MIGRATOR.run(&env.pool).await?;
Ok(Route::new()
.at("/page_view", post(record_page_view))
.at("/page_view/:id", post(append_page_view))
.at("/auth/sign_in", post(signin))
.at("/auth/new_password", post(new_password))
.data(env)
.with(middleware::Cors::new())
.with(middleware::Tracing))
}
#[tokio::main]
async fn main() -> Result<(), Error> {
let filter_layer = tracing_subscriber::filter::EnvFilter::builder()
.with_default_directive(tracing_subscriber::filter::LevelFilter::INFO.into())
.from_env_lossy();
tracing_subscriber::fmt()
.with_env_filter(filter_layer)
.without_time()
.with_ansi(cfg!(feature = "local"))
.init();
let endpoint = create().await?;
if cfg!(feature = "local") {
poem::Server::new(poem::listener::TcpListener::bind("127.0.0.1:3000"))
.run(endpoint)
.await?;
} else {
poem_lambda::run(endpoint).await?;
}
Ok(())
}


@@ -0,0 +1,57 @@
use std::io::Read;
use lambda_runtime::Error;
use serde::Deserialize;
#[derive(Debug, Deserialize)]
pub struct Config {
pub db: DbConfig,
pub auth: AuthConfig,
}
#[derive(Debug, Deserialize)]
pub struct DbConfig {
pub endpoint: String,
pub port: Option<u16>,
pub username: String,
pub password: String,
pub dbname: String,
}
#[derive(Debug, Deserialize)]
pub struct AuthConfig {
pub token_key: String,
}
pub fn load_from_file() -> Result<Config, Error> {
log::info!("Loading configuration from 'local.toml'");
let path = std::env::current_dir()?.join("local.toml");
if !path.is_file() {
log::error!("Local configuration file 'local.toml' not found");
return Err("Missing configuration file".into());
}
let mut file = std::fs::File::open(path)?;
let mut content = String::new();
file.read_to_string(&mut content)?;
let config = toml::from_str(&content)?;
Ok(config)
}
pub async fn load_from_env() -> Result<Config, Error> {
let endpoint = std::env::var("DATABASE_ENDPOINT")?;
let password = std::env::var("DATABASE_PASSWORD")?;
let token_key = std::env::var("TOKEN_KEY")?;
let db = DbConfig {
endpoint,
port: None,
username: "analytics".to_string(),
password,
dbname: "analytics".to_string(),
};
let auth = AuthConfig { token_key };
Ok(Config { db, auth })
}
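
Note: a quick way to pin down the shape local.toml must take is a parse test. A minimal sketch (not part of the diff; all field values hypothetical) that could sit at the bottom of this file:

#[cfg(test)]
mod tests {
    use super::Config;

    #[test]
    fn parses_local_toml() {
        // Hypothetical local.toml matching Config -> DbConfig/AuthConfig.
        let doc = r#"
            [db]
            endpoint = "localhost"
            port = 5432
            username = "analytics"
            password = "secret"
            dbname = "analytics"

            [auth]
            token_key = "dummy-token-key"
        "#;

        let config: Config = toml::from_str(doc).expect("config parses");
        assert_eq!(config.db.port, Some(5432));
        assert_eq!(config.db.username, "analytics");
    }
}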


@@ -0,0 +1,89 @@
use analytics_model::user::User;
use async_trait::async_trait;
use fernet::Fernet;
use poem::{
error::InternalServerError,
http::StatusCode,
web::headers::{self, authorization::Bearer, HeaderMapExt},
Endpoint, Request,
};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use uuid::Uuid;
pub struct AuthEndpoint<E: Endpoint> {
pool: PgPool,
fernet: Fernet,
endpoint: E,
}
impl<E: Endpoint> AuthEndpoint<E> {
pub fn new(pool: PgPool, fernet: Fernet, endpoint: E) -> Self {
Self {
pool,
fernet,
endpoint,
}
}
}
#[async_trait]
impl<E: Endpoint> Endpoint for AuthEndpoint<E> {
type Output = E::Output;
async fn call(&self, mut request: Request) -> poem::Result<Self::Output> {
// Make sure that we have an 'Authorization' header that has a 'Bearer' token.
let Some(auth) = request.headers().typed_get::<headers::Authorization<Bearer>>() else {
log::info!("Missing 'Authorization' header with 'Bearer' token");
return Err(poem::Error::from_status(StatusCode::UNAUTHORIZED));
};
// Ensure that we can decrypt the token using the provided Fernet key.
let Token { user_id } = match Token::decode(&self.fernet, auth.token()) {
Some(token) => token,
None => {
log::error!("Failed to decode authentication token");
return Err(poem::Error::from_status(StatusCode::UNAUTHORIZED));
}
};
// If the user no longer exists, then a simple 401 will suffice.
let Some(user) = sqlx::query_as::<_, User>("SELECT * FROM users WHERE id = $1")
.bind(user_id).fetch_optional(&self.pool).await.map_err(InternalServerError)? else {
log::error!("User '{user_id}' no longer exists");
return Err(poem::Error::from_status(StatusCode::UNAUTHORIZED));
};
// Make sure that the user is still enabled.
if !user.enabled {
log::error!("User '{user_id}' is not enabled");
return Err(poem::Error::from_status(StatusCode::FORBIDDEN));
}
// Store the authenticated user in the request for retrieval by handlers.
request.set_data(user);
self.endpoint.call(request).await
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Token {
pub user_id: Uuid,
}
impl Token {
pub fn new(user_id: Uuid) -> Self {
Self { user_id }
}
pub fn encode(&self, fernet: &Fernet) -> String {
let plain = serde_json::to_string(self).expect("Unable to JSON encode token");
fernet.encrypt(plain.as_bytes())
}
pub fn decode(fernet: &Fernet, encoded: &str) -> Option<Self> {
let plain = fernet.decrypt(encoded).ok()?;
serde_json::from_slice(&plain).ok()
}
}
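
Note: the Token encode/decode pair round-trips and is easy to verify in isolation. A minimal test sketch (not in the diff), assuming a throwaway key from Fernet::generate_key():

#[cfg(test)]
mod tests {
    use super::Token;
    use fernet::Fernet;
    use uuid::Uuid;

    #[test]
    fn token_round_trips() {
        // A fresh random key stands in for the configured token_key.
        let fernet = Fernet::new(&Fernet::generate_key()).expect("valid key");
        let token = Token::new(Uuid::new_v4());
        let decoded = Token::decode(&fernet, &token.encode(&fernet)).expect("token decodes");
        assert_eq!(decoded.user_id, token.user_id);
    }
}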


@@ -0,0 +1,60 @@
use std::ops::Deref;
use std::sync::Arc;
use std::time::Duration;
use fernet::Fernet;
use log::LevelFilter;
use sqlx::postgres::PgConnectOptions;
use sqlx::ConnectOptions;
use crate::config::Config;
pub struct Env {
inner: Arc<Inner>,
}
impl Clone for Env {
fn clone(&self) -> Self {
Self {
inner: Arc::clone(&self.inner),
}
}
}
impl Deref for Env {
type Target = Inner;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
pub struct Inner {
pub pool: sqlx::PgPool,
pub fernet: Fernet,
}
impl Env {
pub async fn new(config: Config) -> Self {
let mut connection_opts = PgConnectOptions::new()
.host(&config.db.endpoint)
.username(&config.db.username)
.password(&config.db.password)
.database(&config.db.dbname)
.log_statements(LevelFilter::Debug)
.log_slow_statements(LevelFilter::Warn, Duration::from_secs(1));
if let Some(port) = config.db.port {
connection_opts = connection_opts.port(port);
}
let inner = Inner {
pool: sqlx::PgPool::connect_with(connection_opts).await.unwrap(),
fernet: Fernet::new(&config.auth.token_key).expect("valid fernet key"),
};
Self {
inner: Arc::new(inner),
}
}
}
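
Note: because Env derefs to Inner, handlers reach the pool and the Fernet key directly off a cheap Arc clone. A hypothetical handler sketch (not in the diff):

// `env.pool` and `env.fernet` resolve through Deref<Target = Inner>.
#[poem::handler]
async fn db_status(env: poem::web::Data<&Env>) -> String {
    format!("pool size: {}", env.pool.size())
}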


@@ -0,0 +1,2 @@
pub mod auth;
pub mod page_view;


@@ -0,0 +1,72 @@
use analytics_model::user::{authenticate, reset_password};
use poem::{
error::InternalServerError,
handler,
web::{Data, Json},
};
use serde::{Deserialize, Serialize};
use crate::{endpoints::auth::Token, env::Env};
#[derive(Deserialize)]
pub struct SignInBody {
username: String,
password: String,
}
#[derive(Serialize)]
#[serde(tag = "type")]
pub enum SignInResponse {
InvalidCredentials,
NewPassword,
Successful { token: String },
}
#[derive(Deserialize)]
pub struct NewPasswordBody {
username: String,
#[serde(rename = "oldPassword")]
old_password: String,
#[serde(rename = "newPassword")]
new_password: String,
}
#[handler]
pub async fn signin(
env: Data<&Env>,
Json(SignInBody { username, password }): Json<SignInBody>,
) -> poem::Result<Json<SignInResponse>> {
let Some(user) = authenticate(&env.pool, &username, &password).await.map_err(InternalServerError)? else {
return Ok(Json(SignInResponse::InvalidCredentials));
};
if user.reset_password {
return Ok(Json(SignInResponse::NewPassword));
}
let token = Token::new(user.id);
let token = token.encode(&env.fernet);
Ok(Json(SignInResponse::Successful { token }))
}
#[handler]
pub async fn new_password(
env: Data<&Env>,
Json(NewPasswordBody {
username,
old_password,
new_password,
}): Json<NewPasswordBody>,
) -> poem::Result<Json<SignInResponse>> {
let Some(user) = authenticate(&env.pool, &username, &old_password).await.map_err(InternalServerError)? else {
return Ok(Json(SignInResponse::InvalidCredentials));
};
let Some(user) = reset_password(&env.pool, user.id, new_password).await.map_err(InternalServerError)? else {
return Ok(Json(SignInResponse::InvalidCredentials));
};
let token = Token::new(user.id);
let token = token.encode(&env.fernet);
Ok(Json(SignInResponse::Successful { token }))
}
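
Note: with #[serde(tag = "type")] the responses are internally tagged, so the frontend sees bodies like {"type":"Successful","token":"..."}. A small test sketch (not in the diff) of that wire shape:

#[cfg(test)]
mod tests {
    use super::SignInResponse;

    #[test]
    fn responses_are_internally_tagged() {
        let ok = SignInResponse::Successful { token: "abc".into() };
        assert_eq!(
            serde_json::to_string(&ok).unwrap(),
            r#"{"type":"Successful","token":"abc"}"#
        );
        assert_eq!(
            serde_json::to_string(&SignInResponse::NewPassword).unwrap(),
            r#"{"type":"NewPassword"}"#
        );
    }
}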


@@ -0,0 +1,92 @@
use analytics_model::view::{self, create_page_view, PageView};
use poem::{
handler,
web::{Data, Json, Path},
};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;
use crate::env::Env;
#[derive(Deserialize)]
pub struct PageViewBody {
path: Option<String>,
ua: Option<String>,
vw: Option<i32>,
vh: Option<i32>,
sw: Option<i32>,
sh: Option<i32>,
tz: Option<String>,
rf: Option<String>,
}
#[derive(Serialize)]
pub struct PageViewResponse {
id: Option<Uuid>,
}
#[handler]
pub async fn record_page_view(
env: Data<&Env>,
Json(PageViewBody {
path,
ua,
vw,
vh,
sw,
sh,
tz,
rf,
}): Json<PageViewBody>,
) -> poem::Result<Json<PageViewResponse>> {
let id = if let Some(path) = path {
let id = Uuid::new_v4();
let view = PageView {
id,
path,
time: OffsetDateTime::now_utc(),
user_agent: ua,
viewport_width: vw,
viewport_height: vh,
screen_width: sw,
screen_height: sh,
timezone: tz,
referrer: rf,
beacon: false,
duration: None,
scroll: None,
};
if let Err(err) = create_page_view(&env.pool, view).await {
log::error!("Failed to record page view: {err:?}");
None
} else {
Some(id)
}
} else {
log::info!("Ignoring request for pageview image with no path");
None
};
Ok(Json(PageViewResponse { id }))
}
#[derive(Deserialize)]
pub struct AppendPageViewBody {
duration: f64,
scroll: f64,
}
#[handler]
pub async fn append_page_view(
env: Data<&Env>,
Path(id): Path<Uuid>,
Json(AppendPageViewBody { duration, scroll }): Json<AppendPageViewBody>,
) -> poem::Result<Json<()>> {
if let Err(err) = view::append_page_view(&env.pool, id, duration, scroll).await {
log::error!("Failed to append page view: {err:?}");
}
Ok(Json(()))
}
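
Note: a hypothetical client sketch (not in the diff) for exercising record_page_view against a lambda running with the `local` feature on 127.0.0.1:3000; it assumes reqwest and serde_json are available, and the field names mirror PageViewBody:

async fn send_test_view() -> Result<(), reqwest::Error> {
    let body = serde_json::json!({
        "path": "/blog/analytics",
        "ua": "Mozilla/5.0",
        "vw": 1280, "vh": 720,
        "sw": 1920, "sh": 1080,
        "tz": "Europe/London",
        "rf": ""
    });
    // The handler replies with PageViewResponse { id: Option<Uuid> }.
    let res: serde_json::Value = reqwest::Client::new()
        .post("http://127.0.0.1:3000/page_view")
        .json(&body)
        .send()
        .await?
        .json()
        .await?;
    println!("created page view: {}", res["id"]);
    Ok(())
}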


@@ -0,0 +1,7 @@
pub mod config;
pub mod env;
pub mod handlers;
pub mod endpoints {
pub mod auth;
}

analytics/model/.gitignore (new file)

@@ -0,0 +1 @@
/target

analytics/model/Cargo.lock (generated, new file)

File diff suppressed because it is too large


@@ -0,0 +1,23 @@
[package]
name = "analytics-model"
version = "0.1.0"
edition = "2021"
publish = false
[dependencies]
log = { version = "0.4" }
pbkdf2 = { version = "0.12", features = ["simple"] }
rand_core = { version = "0.6", features = ["std"] }
serde = { version = "1.0", features = ["derive"] }
time = { version = "0.3", features = ["formatting", "serde"] }
uuid = { version = "1.2", features = ["v4", "serde"] }
[dependencies.sqlx]
version = "0.7"
features = [
"migrate",
"postgres",
"runtime-tokio-rustls",
"time",
"uuid"
]


@@ -0,0 +1,61 @@
CREATE TABLE IF NOT EXISTS page_views (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
path TEXT NOT NULL,
time TIMESTAMP WITH TIME ZONE NOT NULL,
user_agent TEXT,
viewport_width INTEGER,
viewport_height INTEGER,
screen_width INTEGER,
screen_height INTEGER,
timezone TEXT,
referrer TEXT,
beacon BOOLEAN NOT NULL,
duration REAL,
scroll REAL
);
CREATE TABLE IF NOT EXISTS page_views_day (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
path TEXT NOT NULL,
year INTEGER NOT NULL,
month INTEGER NOT NULL,
day INTEGER NOT NULL,
hour INTEGER NOT NULL,
count INTEGER NOT NULL,
total_beacon INTEGER NOT NULL,
total_scroll REAL NOT NULL,
total_duration REAL NOT NULL,
CONSTRAINT unique_page_views_day
UNIQUE (path, year, month, day, hour)
);
CREATE TABLE IF NOT EXISTS page_views_week (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
path TEXT NOT NULL,
year INTEGER NOT NULL,
week INTEGER NOT NULL,
dow INTEGER NOT NULL,
count INTEGER NOT NULL,
total_beacon INTEGER NOT NULL,
total_scroll REAL NOT NULL,
total_duration REAL NOT NULL,
CONSTRAINT unique_page_views_week
UNIQUE (path, year, week, dow)
);
CREATE TABLE IF NOT EXISTS page_views_month (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
path TEXT NOT NULL,
year INTEGER NOT NULL,
month INTEGER NOT NULL,
day INTEGER NOT NULL,
count INTEGER NOT NULL,
total_beacon INTEGER NOT NULL,
total_scroll REAL NOT NULL,
total_duration REAL NOT NULL,
CONSTRAINT unique_page_views_month
UNIQUE (path, year, month, day)
);
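
Note: each accumulator row is keyed by broken-down timestamp parts. A small sketch (not in the diff) of how a timestamp maps onto the page_views_day key (path aside), using the time crate with its macros feature assumed:

use time::macros::datetime;

fn main() {
    let t = datetime!(2023-09-16 23:01:03 UTC);
    // (year, month, day, hour) — the uniqueness constraint of page_views_day.
    let key = (t.year(), t.month() as i32, t.day() as i32, t.hour() as i32);
    assert_eq!(key, (2023, 9, 16, 23));
}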


@@ -0,0 +1,14 @@
CREATE TABLE IF NOT EXISTS users (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
username TEXT NOT NULL,
password TEXT NOT NULL,
enabled BOOLEAN NOT NULL,
reset_password BOOLEAN NOT NULL,
CONSTRAINT unique_username
UNIQUE (username)
);
-- Create an initial user that has a temporary password
INSERT INTO users (username, password, enabled, reset_password)
VALUES ('admin', 'admin', TRUE, TRUE);


@@ -0,0 +1,4 @@
pub mod user;
pub mod view;
pub static MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!();


@@ -0,0 +1,72 @@
use pbkdf2::{
password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString},
Pbkdf2,
};
use rand_core::OsRng;
use serde::Serialize;
use sqlx::PgPool;
use uuid::Uuid;
#[derive(Debug, Clone, sqlx::FromRow, Serialize)]
pub struct User {
pub id: Uuid,
pub username: String,
#[serde(skip)]
pub password: String,
pub enabled: bool,
pub reset_password: bool,
}
pub async fn authenticate(
pool: &PgPool,
username: &str,
password: &str,
) -> sqlx::Result<Option<User>> {
let user: User = if let Some(user) = sqlx::query_as("SELECT * FROM users WHERE username = $1")
.bind(username)
.fetch_optional(pool)
.await?
{
user
} else {
log::warn!("User not found with username '{username}'");
return Ok(None);
};
let parsed_hash = PasswordHash::new(&user.password).expect("valid password hash");
if let Err(err) = Pbkdf2.verify_password(password.as_bytes(), &parsed_hash) {
log::error!(
"Incorrect password for user '{username}' ('{}'): {err:?}",
user.id
);
return Ok(None);
}
if !user.enabled {
log::error!("User '{username}' ('{}') is disabled", user.id);
return Ok(None);
}
Ok(Some(user))
}
pub async fn reset_password(
pool: &PgPool,
id: Uuid,
new_password: String,
) -> sqlx::Result<Option<User>> {
let salt = SaltString::generate(&mut OsRng);
let password = Pbkdf2
.hash_password(new_password.as_bytes(), &salt)
.expect("valid password hash")
.to_string();
sqlx::query_as(
"UPDATE users SET password = $1, reset_password = FALSE WHERE id = $2 RETURNING *",
)
.bind(password)
.bind(id)
.fetch_optional(pool)
.await
}
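
Note: the hash/verify pair used by authenticate and reset_password round-trips. A minimal test sketch (not in the diff) against the same pbkdf2 API:

#[cfg(test)]
mod tests {
    use pbkdf2::{
        password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString},
        Pbkdf2,
    };
    use rand_core::OsRng;

    #[test]
    fn password_hash_round_trips() {
        let salt = SaltString::generate(&mut OsRng);
        let hash = Pbkdf2
            .hash_password(b"correct horse", &salt)
            .expect("hash")
            .to_string();
        let parsed = PasswordHash::new(&hash).expect("valid hash");
        assert!(Pbkdf2.verify_password(b"correct horse", &parsed).is_ok());
        assert!(Pbkdf2.verify_password(b"wrong", &parsed).is_err());
    }
}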

analytics/model/src/view.rs (new file)

@@ -0,0 +1,332 @@
use sqlx::PgPool;
use time::OffsetDateTime;
use uuid::Uuid;
#[derive(Debug, Clone, sqlx::FromRow)]
pub struct PageView {
pub id: Uuid,
pub path: String,
pub time: OffsetDateTime,
pub user_agent: Option<String>,
pub viewport_width: Option<i32>,
pub viewport_height: Option<i32>,
pub screen_width: Option<i32>,
pub screen_height: Option<i32>,
pub timezone: Option<String>,
pub referrer: Option<String>,
pub beacon: bool,
pub duration: Option<f64>,
pub scroll: Option<f64>,
}
#[derive(Debug, Clone, sqlx::FromRow)]
pub struct PageViewsDay {
pub id: Uuid,
pub path: String,
pub year: i32,
pub month: i32,
pub day: i32,
pub hour: i32,
pub count: i32,
pub total_beacon: i32,
pub total_scroll: f64,
pub total_duration: f64,
}
#[derive(Debug, Clone, sqlx::FromRow)]
pub struct PageViewsWeek {
pub id: Uuid,
pub path: String,
pub year: i32,
pub week: i32,
pub dow: i32,
pub count: i32,
pub total_beacon: i32,
pub total_scroll: f64,
pub total_duration: f64,
}
#[derive(Debug, Clone, sqlx::FromRow)]
pub struct PageViewsMonth {
pub id: Uuid,
pub path: String,
pub year: i32,
pub month: i32,
pub day: i32,
pub count: i32,
pub total_beacon: i32,
pub total_scroll: f64,
pub total_duration: f64,
}
pub async fn create_page_view(pool: &PgPool, view: PageView) -> sqlx::Result<()> {
sqlx::query(
"INSERT INTO page_views
(id, path, time, user_agent,
viewport_width, viewport_height,
screen_width, screen_height,
timezone, referrer,
beacon, duration, scroll)
VALUES ($1, $2, $3, $4,
$5, $6,
$7, $8,
$9, $10,
$11, $12, $13)",
)
.bind(view.id)
.bind(&view.path)
.bind(view.time)
.bind(view.user_agent)
.bind(view.viewport_width)
.bind(view.viewport_height)
.bind(view.screen_width)
.bind(view.screen_height)
.bind(view.timezone)
.bind(view.referrer)
.bind(view.beacon)
.bind(view.duration)
.bind(view.scroll)
.execute(pool)
.await?;
update_count_accumulators(pool, &view.path, view.time).await?;
update_count_accumulators(pool, "", view.time).await?;
Ok(())
}
async fn update_count_accumulators(
pool: &PgPool,
path: &str,
time: OffsetDateTime,
) -> sqlx::Result<()> {
sqlx::query(
"
INSERT INTO page_views_day
(path, year, month, day, hour, count, total_beacon, total_scroll, total_duration)
VALUES
($1, $2, $3, $4, $5, 1, 0, 0, 0)
ON CONFLICT (path, year, month, day, hour)
DO UPDATE SET
count = page_views_day.count + 1
",
)
.bind(path)
.bind(time.year())
.bind(time.month() as i32)
.bind(time.day() as i32)
.bind(time.hour() as i32)
.execute(pool)
.await?;
sqlx::query(
"
INSERT INTO page_views_week
(path, year, week, dow, count, total_beacon, total_scroll, total_duration)
VALUES
($1, $2, $3, $4, 1, 0, 0, 0)
ON CONFLICT (path, year, week, dow)
DO UPDATE SET
count = page_views_week.count + 1
",
)
.bind(path)
.bind(time.year())
.bind(time.iso_week() as i32)
.bind(time.weekday().number_days_from_sunday() as i32)
.execute(pool)
.await?;
sqlx::query(
"
INSERT INTO page_views_month
(path, year, month, day, count, total_beacon, total_scroll, total_duration)
VALUES
($1, $2, $3, $4, 1, 0, 0, 0)
ON CONFLICT (path, year, month, day)
DO UPDATE SET
count = page_views_month.count + 1
",
)
.bind(path)
.bind(time.year())
.bind(time.month() as i32)
.bind(time.day() as i32)
.execute(pool)
.await?;
Ok(())
}
struct Accumulators {
duration: f64,
scroll: f64,
count_delta: i32,
duration_delta: f64,
scroll_delta: f64,
}
async fn update_beacon_accumulators(
pool: &PgPool,
path: &str,
time: OffsetDateTime,
Accumulators {
duration,
scroll,
count_delta,
duration_delta,
scroll_delta,
}: Accumulators,
) -> sqlx::Result<()> {
sqlx::query(
"
INSERT INTO page_views_day
(path, year, month, day, hour, count, total_beacon, total_scroll, total_duration)
VALUES
($1, $2, $3, $4, $5, 1, 1, $6, $7)
ON CONFLICT (path, year, month, day, hour)
DO UPDATE SET
total_beacon = page_views_day.total_beacon + $8,
total_scroll = page_views_day.total_scroll + $9,
total_duration = page_views_day.total_duration + $10
",
)
.bind(path)
.bind(time.year())
.bind(time.month() as i32)
.bind(time.day() as i32)
.bind(time.hour() as i32)
.bind(scroll)
.bind(duration)
.bind(count_delta)
.bind(scroll_delta)
.bind(duration_delta)
.execute(pool)
.await?;
sqlx::query(
"
INSERT INTO page_views_week
(path, year, week, dow, count, total_beacon, total_scroll, total_duration)
VALUES
($1, $2, $3, $4, 1, 1, $5, $6)
ON CONFLICT (path, year, week, dow)
DO UPDATE SET
total_beacon = page_views_week.total_beacon + $7,
total_scroll = page_views_week.total_scroll + $8,
total_duration = page_views_week.total_duration + $9
",
)
.bind(path)
.bind(time.year())
.bind(time.iso_week() as i32)
.bind(time.weekday().number_days_from_sunday() as i32)
.bind(scroll)
.bind(duration)
.bind(count_delta)
.bind(scroll_delta)
.bind(duration_delta)
.execute(pool)
.await?;
sqlx::query(
"
INSERT INTO page_views_month
(path, year, month, day, count, total_beacon, total_scroll, total_duration)
VALUES
($1, $2, $3, $4, 1, 1, $5, $6)
ON CONFLICT (path, year, month, day)
DO UPDATE SET
total_beacon = page_views_month.total_beacon + $7,
total_scroll = page_views_month.total_scroll + $8,
total_duration = page_views_month.total_duration + $9
",
)
.bind(path)
.bind(time.year())
.bind(time.month() as i32)
.bind(time.day() as i32)
.bind(scroll)
.bind(duration)
.bind(count_delta)
.bind(scroll_delta)
.bind(duration_delta)
.execute(pool)
.await?;
Ok(())
}
pub async fn append_page_view(
pool: &PgPool,
uuid: Uuid,
duration: f64,
scroll: f64,
) -> sqlx::Result<()> {
let view = match sqlx::query_as::<_, PageView>("SELECT * FROM page_views WHERE id = $1")
.bind(uuid)
.fetch_optional(pool)
.await?
{
Some(view) => view,
None => {
log::warn!("Ignoring append for page view '{uuid}' which does not exist");
return Ok(());
}
};
// If the beacon has already been received, we want to subtract the last recorded duration and
// scroll distance from our totals before we then add the new duration and scroll distance.
let (count_delta, duration_delta, scroll_delta) = if view.beacon {
(
0,
duration - view.duration.unwrap_or(0.0),
scroll - view.scroll.unwrap_or(0.0),
)
} else {
(1, duration, scroll)
};
// Update the page view record with the received duration and scroll distance, and set the
// beacon flag so we know we've recorded this beacon data into our accumulators.
sqlx::query("UPDATE page_views SET duration = $1, scroll = $2, beacon = $3 WHERE id = $4")
.bind(duration)
.bind(scroll)
.bind(true)
.bind(uuid)
.execute(pool)
.await?;
// Update the accumulated statistics for the page view path, and the site overall.
update_beacon_accumulators(
pool,
&view.path,
view.time,
Accumulators {
duration,
scroll,
count_delta,
duration_delta,
scroll_delta,
},
)
.await?;
update_beacon_accumulators(
pool,
"",
view.time,
Accumulators {
duration,
scroll,
count_delta,
duration_delta,
scroll_delta,
},
)
.await?;
Ok(())
}
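
Note: a worked check of the repeat-beacon delta rule above (plain Rust, no database). A second beacon of 25s/60% after a recorded first beacon of 10s/40% should contribute only the differences to the accumulators:

fn main() {
    let (prev_duration, prev_scroll) = (10.0_f64, 40.0_f64);
    let (duration, scroll) = (25.0_f64, 60.0_f64);
    let beacon_seen = true;
    // Mirrors the (count_delta, duration_delta, scroll_delta) computation.
    let (count_delta, duration_delta, scroll_delta) = if beacon_seen {
        (0, duration - prev_duration, scroll - prev_scroll)
    } else {
        (1, duration, scroll)
    };
    assert_eq!((count_delta, duration_delta, scroll_delta), (0, 15.0, 20.0));
}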

cf/analytics.yaml (new file)

@@ -0,0 +1,317 @@
#
# analytics.yaml
#
# CloudFormation template for site analytics resources.
#
Description: Site analytics
Parameters:
DomainName:
Type: String
Description: The domain name to use
Default: blakerain.com
HostedZoneId:
Type: String
Description: The hosted zone for the domain
Default: Z2C0W1IB1QO9DO
Outputs:
AnalyticsLambdaDeployerAccessKeyId:
Value: !Ref AnalyticsLambdaDeployerAccessKey
AnalyticsLambdaDeployerSecretAccessKey:
Value: !GetAtt AnalyticsLambdaDeployerAccessKey.SecretAccessKey
Resources:
AnalyticsVpc:
Type: AWS::EC2::VPC
Properties:
CidrBlock: 10.0.0.0/16
EnableDnsHostnames: true
EnableDnsSupport: true
AnalyticsSubnet1:
Type: AWS::EC2::Subnet
Properties:
VpcId: !Ref AnalyticsVpc
AvailabilityZone: eu-west-1a
CidrBlock: 10.0.4.0/24
AnalyticsSubnet2:
Type: AWS::EC2::Subnet
Properties:
VpcId: !Ref AnalyticsVpc
AvailabilityZone: eu-west-1b
CidrBlock: 10.0.5.0/24
AnalyticsLambdaSecurityGroup:
Type: AWS::EC2::SecurityGroup
Properties:
VpcId: !Ref AnalyticsVpc
GroupDescription: Lambda security group
SecurityGroupIngress:
- IpProtocol: tcp
FromPort: "2049"
ToPort: "2049"
CidrIp: "10.0.0.0/16"
AnalyticsDatabaseSecurityGroup:
Type: AWS::EC2::SecurityGroup
Properties:
VpcId: !Ref AnalyticsVpc
GroupDescription: Database security group
SecurityGroupIngress:
- IpProtocol: tcp
FromPort: "5432"
ToPort: "5432"
SourceSecurityGroupId: !Ref AnalyticsLambdaSecurityGroup
Description: Allow inbound PostgreSQL traffic from Lambda functions
AnalyticsDatabaseSubnetGroup:
Type: AWS::RDS::DBSubnetGroup
Properties:
DBSubnetGroupName: analytics_dbsubnet_group
DBSubnetGroupDescription: Analytics database subnet group
SubnetIds:
- !Ref AnalyticsSubnet1
- !Ref AnalyticsSubnet2
AnalyticsDatabase:
Type: AWS::RDS::DBInstance
Properties:
AllocatedStorage: "20"
AutoMinorVersionUpgrade: true
AvailabilityZone: eu-west-1a
BackupRetentionPeriod: 7
DBInstanceClass: db.t4g.micro
DBName: analytics
DBSubnetGroupName: !Ref AnalyticsDatabaseSubnetGroup
Engine: postgres
MasterUsername: analytics
MasterUserPassword: "{{resolve:ssm:analytics_database_password}}"
MaxAllocatedStorage: 250
MultiAZ: false
Port: "5432"
PreferredBackupWindow: "03:00-04:00"
PreferredMaintenanceWindow: "Sun:00:00-Sun:02:00"
PubliclyAccessible: false
VPCSecurityGroups:
- !Ref AnalyticsDatabaseSecurityGroup
AnalyticsLambdaLogGroup:
Type: AWS::Logs::LogGroup
Properties:
RetentionInDays: 365
LogGroupName:
Fn::Join:
- "/"
- - ""
- aws
- lambda
- !Ref AnalyticsLambda
AnalyticsLambdaLoggingPolicy:
Type: AWS::IAM::Policy
Properties:
PolicyName: analytics_lambda_logging_policy
PolicyDocument:
Version: "2012-10-17"
Statement:
- Effect: Allow
Action:
- "logs:CreateLogStream"
- "logs:PutLogEvents"
Resource: !GetAtt AnalyticsLambdaLogGroup.Arn
Roles:
- !Ref AnalyticsLambdaRole
AnalyticsLambdaPolicy:
Type: AWS::IAM::Policy
Properties:
PolicyName: analytics_lambda_policy
PolicyDocument:
Version: "2012-10-17"
Statement:
- Effect: Allow
Action:
- "ec2:CreateNetworkInterface"
- "ec2:DeleteNetworkInterface"
- "ec2:DescribeNetworkInterfaces"
- "ec2:AssignPrivateIpAddresses"
- "ec2:UnassignPrivateIpAddresses"
Resource: "*"
- Effect: Allow
Action:
- "logs:CreateLogGroup"
Resource:
Fn::Sub: "arn:aws:logs:${AWS::Region}:${AWS::AccountId}::*"
Roles:
- !Ref AnalyticsLambdaRole
AnalyticsLambdaRole:
Type: AWS::IAM::Role
Properties:
RoleName: analytics_lambda_role
AssumeRolePolicyDocument:
Version: "2012-10-17"
Statement:
- Effect: Allow
Principal:
Service: lambda.amazonaws.com
Action: "sts:AssumeRole"
AnalyticsLambda:
Type: AWS::Lambda::Function
Properties:
FunctionName: analytics_lambda
Description: "Site analytics"
Handler: unused
Architectures:
- arm64
MemorySize: 512
Runtime: provided.al2
Timeout: 360
Role: !GetAtt AnalyticsLambdaRole.Arn
Code:
S3Bucket: private.s3.blakerain.com
S3Key: default-function.zip
Environment:
Variables:
RUST_LOG: info
DATABASE_ENDPOINT: !GetAtt AnalyticsDatabase.Endpoint.Address
DATABASE_PASSWORD: "{{resolve:ssm:analytics_database_password}}"
VpcConfig:
SubnetIds:
- !Ref AnalyticsSubnet1
- !Ref AnalyticsSubnet2
SecurityGroupIds:
- !Ref AnalyticsLambdaSecurityGroup
DependsOn:
- AnalyticsLambdaPolicy
AnalyticsLambdaDeployer:
Type: AWS::IAM::User
Properties:
UserName: analytics_lambda_deployer
AnalyticsLambdaDeployerAccessKey:
Type: AWS::IAM::AccessKey
Properties:
UserName: !Ref AnalyticsLambdaDeployer
AnalyticsApi:
Type: AWS::ApiGatewayV2::Api
Properties:
Name: blakerain_analytics_api
Description: Analytics API
ProtocolType: HTTP
AnalyticsApiIntegration:
Type: AWS::ApiGatewayV2::Integration
Properties:
ApiId: !Ref AnalyticsApi
ConnectionType: INTERNET
IntegrationMethod: POST
IntegrationType: AWS_PROXY
TimeoutInMillis: 30000
PayloadFormatVersion: "2.0"
IntegrationUri: !GetAtt AnalyticsLambda.Arn
AnalyticsApiRouteDefault:
Type: AWS::ApiGatewayV2::Route
Properties:
ApiId: !Ref AnalyticsApi
ApiKeyRequired: false
RouteKey: "$default"
Target:
Fn::Join:
- "/"
- - integrations
- !Ref AnalyticsApiIntegration
AnalyticsApiLogGroup:
Type: AWS::Logs::LogGroup
Properties:
RetentionInDays: 365
LogGroupName: "/aws/apigateway/blakerain_analytics_api"
AnalyticsApiStage:
Type: AWS::ApiGatewayV2::Stage
Properties:
ApiId: !Ref AnalyticsApi
StageName: "$default"
AutoDeploy: true
AccessLogSettings:
DestinationArn: !GetAtt AnalyticsApiLogGroup.Arn
Format: '$context.identity.sourceIp - - [$context.requestTime] "$context.httpMethod $context.routeKey $context.protocol" $context.status $context.responseLength $context.requestId'
AnalyticsApiPermission:
Type: AWS::Lambda::Permission
Properties:
Action: "lambda:InvokeFunction"
FunctionName: !GetAtt AnalyticsLambda.Arn
Principal: apigateway.amazonaws.com
SourceArn:
Fn::Join:
- ":"
- - "arn:aws:execute-api"
- !Sub "${AWS::Region}"
- !Sub "${AWS::AccountId}"
- Fn::Join:
- "/"
- - !Ref AnalyticsApi
- "*"
- "$default"
AnalyticsApiDomain:
Type: AWS::ApiGatewayV2::DomainName
Properties:
DomainName:
Fn::Join:
- "."
- - analytics
- !Ref DomainName
DomainNameConfigurations:
- CertificateArn: !Ref AnalyticsApiCertificate
EndpointType: REGIONAL
SecurityPolicy: TLS_1_2
AnalyticsApiDomainMapping:
Type: AWS::ApiGatewayV2::ApiMapping
Properties:
ApiId: !Ref AnalyticsApi
DomainName: !Ref AnalyticsApiDomain
Stage: !Ref AnalyticsApiStage
AnalyticsApiCertificate:
Type: AWS::CertificateManager::Certificate
Properties:
DomainName:
Fn::Join:
- "."
- - analytics
- !Ref DomainName
ValidationMethod: DNS
DomainValidationOptions:
- DomainName:
Fn::Join:
- "."
- - analytics
- !Ref DomainName
HostedZoneId: !Ref HostedZoneId
AnalyticsApiRecordSet:
Type: AWS::Route53::RecordSet
Properties:
HostedZoneId: !Ref HostedZoneId
Name:
Fn::Join:
- "."
- - analytics
- !Ref DomainName
Type: A
AliasTarget:
HostedZoneId: !GetAtt AnalyticsApiDomain.RegionalHostedZoneId
DNSName: !GetAtt AnalyticsApiDomain.RegionalDomainName

public/analytics.js (new file)

@@ -0,0 +1,41 @@
export function getTimezone() {
try {
return Intl.DateTimeFormat().resolvedOptions().timeZone;
} catch {
return null;
}
}
export function getReferrer() {
return document.referrer
.replace(/^https?:\/\/((m|l|w{2,3})([0-9]+)?\.)?([^?#]+)(.*)$/, "$4")
.replace(/^([^/]+)$/, "$1");
}
export function getPosition() {
try {
const doc = window.document.documentElement;
const body = window.document.body;
return Math.min(
100,
5 *
Math.round(
(100 * (doc.scrollTop + doc.clientHeight)) / body.scrollHeight / 5
)
);
} catch {
return 0;
}
}
export function sendBeacon(url, body) {
return fetch(url, {
keepalive: true,
method: "POST",
headers: {
"content-type": "application/json",
},
body,
});
}
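
Note: getPosition snaps scroll depth to 5% steps and caps it at 100. A Rust transcription (helper name hypothetical, not in the diff) of the same arithmetic:

fn position_percent(scroll_top: f64, client_height: f64, scroll_height: f64) -> f64 {
    // Percent of the page scrolled past, rounded to the nearest 5, capped at 100.
    (5.0 * ((100.0 * (scroll_top + client_height)) / scroll_height / 5.0).round()).min(100.0)
}

fn main() {
    assert_eq!(position_percent(0.0, 500.0, 2000.0), 25.0);
    assert_eq!(position_percent(1500.0, 500.0, 2000.0), 100.0);
}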


@@ -5,7 +5,7 @@ fn main() {
     wasm_logger::init(wasm_logger::Config::default());

     log::info!(
-        "blakerain.com {}, {} {} build",
+        "blakerain.com {}, {} {} build, compiled {}",
         env!("CARGO_PKG_VERSION"),
         if cfg!(debug_assertions) {
             "debug"

@@ -16,11 +16,10 @@ fn main() {
             "hydration"
         } else {
             "standard"
-        }
+        },
+        env!("BUILD_TIME")
     );
-    log::info!("Compiled {}", env!("BUILD_TIME"));

     let app = yew::Renderer::<App>::new();

     #[cfg(feature = "hydration")]

@@ -31,7 +30,7 @@ fn main() {
     #[cfg(not(feature = "hydration"))]
     {
-        log::info!("Rendering application");
+        log::info!("Mounting application");
         app.render();
     }
 }


@@ -1,3 +1,4 @@
+pub mod analytics;
 pub mod blog;
 pub mod content;
 pub mod head;

src/components/analytics.rs (new file)

@@ -0,0 +1,405 @@
use std::rc::Rc;
use js_sys::Promise;
use serde::{Deserialize, Serialize};
use time::{Duration, OffsetDateTime};
use uuid::Uuid;
use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
use wasm_bindgen_futures::JsFuture;
use yew::{function_component, html, use_effect_with_deps, use_reducer, Event, Html, Reducible};
use yew_hooks::{
use_async, use_async_with_options, use_event_with_window, UseAsyncHandle, UseAsyncOptions,
};
use yew_router::prelude::use_location;
#[wasm_bindgen(module = "/public/analytics.js")]
extern "C" {
#[wasm_bindgen(js_name = "getTimezone")]
fn get_timezone() -> Option<String>;
#[wasm_bindgen(js_name = "getReferrer")]
fn get_referrer() -> String;
#[wasm_bindgen(js_name = "getPosition")]
fn get_position() -> f64;
#[wasm_bindgen(catch, js_name = "sendBeacon")]
fn send_beacon(url: &str, body: &str) -> Result<Promise, JsValue>;
}
#[derive(Serialize)]
struct AnalyticsData {
path: Option<String>,
ua: Option<String>,
vw: Option<i32>,
vh: Option<i32>,
sw: Option<i32>,
sh: Option<i32>,
tz: Option<String>,
rf: Option<String>,
}
#[derive(Deserialize)]
struct AnalyticsResponse {
id: Option<Uuid>,
}
#[inline]
fn quick_f64_to_i32(value: f64) -> i32 {
value as i32
}
fn should_not_track() -> bool {
let dnt = gloo::utils::window().navigator().do_not_track();
dnt == "1" || dnt == "yes"
}
impl AnalyticsData {
pub fn capture() -> Self {
let window = gloo::utils::window();
let path = if let Ok(mut path) = window.location().pathname() {
if !path.starts_with('/') {
path.insert(0, '/')
}
if path.len() > 1 && path.ends_with('/') {
path.pop().expect("pop");
}
Some(path)
} else {
None
};
Self {
path,
ua: window.navigator().user_agent().ok(),
vw: window
.inner_width()
.expect("inner_width")
.as_f64()
.map(quick_f64_to_i32),
vh: window
.inner_height()
.expect("inner_height")
.as_f64()
.map(quick_f64_to_i32),
sw: window.screen().expect("screen").width().ok(),
sh: window.screen().expect("screen").height().ok(),
tz: get_timezone(),
rf: Some(get_referrer()),
}
}
}
#[derive(Clone)]
struct AnalyticsState {
view_id: Option<Uuid>,
start: OffsetDateTime,
scroll: f64,
visibility: VisibilityState,
}
#[derive(Clone)]
enum VisibilityState {
Unknown,
Visible {
total_hidden: Duration,
},
Hidden {
total: Duration,
start: OffsetDateTime,
},
}
impl Default for VisibilityState {
fn default() -> Self {
Self::Unknown
}
}
impl VisibilityState {
fn from_document() -> Self {
let hidden = gloo::utils::window().document().expect("document").hidden();
if hidden {
VisibilityState::Hidden {
total: Duration::new(0, 0),
start: OffsetDateTime::now_utc(),
}
} else {
VisibilityState::Visible {
total_hidden: Duration::new(0, 0),
}
}
}
fn to_visible(&self) -> Self {
match self {
Self::Unknown => Self::Visible {
total_hidden: Duration::new(0, 0),
},
Self::Hidden { total, start } => {
let hidden = OffsetDateTime::now_utc() - *start;
let total_hidden = *total + hidden;
log::info!(
"Page is now visible; was hidden for {} second(s) ({} total)",
hidden.whole_seconds(),
total_hidden.whole_seconds(),
);
Self::Visible { total_hidden }
}
Self::Visible { .. } => self.clone(),
}
}
fn to_hidden(&self) -> Self {
match self {
Self::Unknown => Self::Hidden {
total: Duration::new(0, 0),
start: OffsetDateTime::now_utc(),
},
Self::Hidden { .. } => self.clone(),
Self::Visible {
total_hidden: hidden,
} => Self::Hidden {
total: *hidden,
start: OffsetDateTime::now_utc(),
},
}
}
}
impl AnalyticsState {
fn new() -> Self {
Self {
view_id: None,
start: OffsetDateTime::now_utc(),
scroll: 0.0,
visibility: VisibilityState::default(),
}
}
fn new_with_id(id: Uuid) -> Self {
Self {
view_id: Some(id),
start: OffsetDateTime::now_utc(),
scroll: get_position().clamp(0.0, 100.0),
visibility: VisibilityState::from_document(),
}
}
fn get_total_hidden(&self) -> Duration {
match self.visibility {
VisibilityState::Unknown => Duration::seconds(0),
VisibilityState::Visible {
total_hidden: hidden,
} => hidden,
VisibilityState::Hidden { total, start } => total + (OffsetDateTime::now_utc() - start),
}
}
fn get_duration(&self) -> f64 {
((OffsetDateTime::now_utc() - self.start) - self.get_total_hidden())
.abs()
.clamp(Duration::new(0, 0), Duration::hours(2))
.as_seconds_f64()
.round()
}
}
enum AnalyticsAction {
NewPageView(Uuid),
SetScroll(f64),
VisibilityChanged(bool),
}
impl Reducible for AnalyticsState {
type Action = AnalyticsAction;
fn reduce(self: Rc<Self>, action: Self::Action) -> Rc<Self> {
match action {
AnalyticsAction::NewPageView(id) => Self::new_with_id(id),
AnalyticsAction::SetScroll(distance) => Self {
scroll: self.scroll.max(distance),
..(*self).clone()
},
AnalyticsAction::VisibilityChanged(visible) => {
let visibility = if visible {
self.visibility.to_visible()
} else {
self.visibility.to_hidden()
};
Self {
visibility,
..(*self).clone()
}
}
}
.into()
}
}
#[derive(Serialize)]
struct AnalyticsBeaconData {
duration: f64,
scroll: f64,
}
impl From<&AnalyticsState> for AnalyticsBeaconData {
fn from(state: &AnalyticsState) -> Self {
Self {
duration: state.get_duration(),
scroll: 0.0_f64.max(state.scroll),
}
}
}
impl AnalyticsBeaconData {
pub async fn send(&self, url: &str) -> Result<(), JsValue> {
let body = serde_json::to_string(self).expect("JSON");
let res = send_beacon(url, &body)?;
JsFuture::from(res).await?;
Ok(())
}
}
fn get_analytics_host() -> String {
let mut host = std::option_env!("ANALYTICS_HOST")
.unwrap_or("https://analytics.blakerain.com")
.to_string();
if !host.ends_with('/') {
host.push('/');
}
host
}
#[function_component(Analytics)]
pub fn analytics() -> Html {
let host = get_analytics_host();
let state = use_reducer(AnalyticsState::new);
let location = use_location();
let send_analytics: UseAsyncHandle<(), &'static str> = {
let host = host.clone();
let state = state.clone();
use_async_with_options(
async move {
if should_not_track() {
log::info!("Do Not Track is enabled; analytics will not be sent");
return Ok(());
}
let data = AnalyticsData::capture();
let res = reqwest::Client::new()
.post(format!("{host}page_view"))
.json(&data)
.send()
.await
.map_err(|err| {
log::error!("Unable to send analytics data: {err:?}");
"Unable to send analytics data"
})?;
let AnalyticsResponse { id } =
res.json::<AnalyticsResponse>().await.map_err(|err| {
log::error!("Unable to parse analytics response: {err:?}");
"Unable to parse analytics response"
})?;
if let Some(id) = id {
log::info!(
"New page view '{id}' (for '{}')",
data.path.unwrap_or_default()
);
state.dispatch(AnalyticsAction::NewPageView(id));
} else {
log::warn!("Analytics record was not created; received no UUID");
}
Ok(())
},
UseAsyncOptions::enable_auto(),
)
};
let send_beacon: UseAsyncHandle<(), &'static str> = {
let host = host.clone();
let state = state.clone();
use_async(async move {
if should_not_track() {
log::info!("Do Not Track is enabled; analytics beacon will not be sent");
return Ok(());
}
if let Some(id) = state.view_id {
log::info!("Sending beacon for page view '{id}'");
AnalyticsBeaconData::from(&*state)
.send(&format!("{host}page_view/{id}"))
.await
.map_err(|err| {
log::error!("Failed to send analytics beacon: {err:?}");
"Unable to send analytics beacon"
})?;
}
Ok(())
})
};
{
let send_beacon = send_beacon.clone();
use_effect_with_deps(
move |loc| {
log::info!("Router location has changed: {loc:?}");
send_beacon.run();
send_analytics.run();
},
location.map(|loc| loc.path().to_string()),
)
}
{
let state = state.clone();
use_event_with_window("scroll", move |_: Event| {
let distance = get_position();
state.dispatch(AnalyticsAction::SetScroll(distance));
})
}
{
let state = state.clone();
let send_beacon = send_beacon.clone();
use_event_with_window("visibilitychange", move |_: Event| {
let hidden = gloo::utils::window().document().expect("document").hidden();
state.dispatch(AnalyticsAction::VisibilityChanged(!hidden));
if hidden {
send_beacon.run();
}
})
}
{
let send_beacon = send_beacon.clone();
use_event_with_window("pagehide", move |_: Event| {
send_beacon.run();
})
}
html! {}
}
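
Note: the duration the beacon reports is wall-clock time minus accumulated hidden time, clamped to at most two hours. A worked check (not in the diff) of that rule with the time crate:

use time::Duration;

fn main() {
    let wall = Duration::seconds(90);
    let hidden = Duration::seconds(30);
    // Mirrors AnalyticsState::get_duration: visible time only, clamped.
    let duration = (wall - hidden)
        .abs()
        .clamp(Duration::new(0, 0), Duration::hours(2));
    assert_eq!(duration.whole_seconds(), 60);
}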


@@ -2,6 +2,8 @@ use web_sys::{window, ScrollBehavior, ScrollToOptions};
 use yew::{function_component, html, use_effect_with_deps, Children, Html, Properties};
 use yew_router::prelude::use_location;

+use crate::components::analytics::Analytics;
+
 mod footer;
 pub mod goto_top;
 pub mod intersperse;

@@ -34,6 +36,7 @@ pub fn layout(props: &LayoutProps) -> Html {
             <navigation::Navigation />
             {props.children.clone()}
             <footer::Footer />
+            <Analytics />
         </div>
     }
 }
} }