Initial analytics #36

Merged
BlakeRain merged 11 commits from analytics into main 2023-09-16 23:01:03 +00:00
44 changed files with 7588 additions and 72 deletions

View File

@ -7,7 +7,7 @@ on:
workflow_dispatch:
jobs:
deploy:
deploy-site:
runs-on: ubuntu-latest
steps:
- name: Checkout the Repository
@ -20,19 +20,14 @@ jobs:
with:
node-version: 18
- name: Configure Cache
uses: actions/cache@v2
with:
path: |
${{ github.workspace }}/node_modules
**/target
key: ${{ runner.os }}-nextjs-${{ hashFiles('**/yarn.lock') }}
- name: Install Rust Toolchain
uses: dtolnay/rust-toolchain@stable
with:
targets: wasm32-unknown-unknown
- name: Setup Rust Cache
uses: Swatinem/rust-cache@v2
- name: Install Trunk
uses: jetli/trunk-action@v0.4.0
with:
@ -63,3 +58,49 @@ jobs:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
DISTRIBUTION_ID: ${{ secrets.AWS_CLOUDFRONT_DISTRIBUTION_ID }}
deploy-analytics-lambda:
runs-on: ubuntu-latest
steps:
- name: Checkout the Repository
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Install the Stable Rust Toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Setup Rust Cache
uses: Swatinem/rust-cache@v2
- name: Install Zig Toolchain
uses: korandoru/setup-zig@v1
with:
zig-version: 0.10.0
- name: Install Cargo Lambda
uses: jaxxstorm/action-install-gh-release@v1.9.0
with:
repo: cargo-lambda/cargo-lambda
- name: Build Lambda Function
run: cargo lambda build --release --arm64 --output-format zip
- name: Configure AWS CLI
run: |
mkdir ~/.aws
echo "[default]" > ~/.aws/config
echo "credential_source = Environment" >> ~/.aws/config
- name: Deploy Lambda Function
run: |
aws lambda update-function-code --function-name analytics_lambda \
--zip-file "fileb://$(pwd)/target/lambda/analytics/bootstrap.zip" --publish
env:
AWS_DEFAULT_REGION: eu-west-1
AWS_ACCESS_KEY_ID: "${{ secrets.ANALYTICS_DEPLOYER_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "${{ secrets.ANALYTICS_DEPLOYER_SECRET_ACCESS_KEY }}"

27
Cargo.lock generated
View File

@ -520,6 +520,19 @@ dependencies = [
"unicode-width",
]
[[package]]
name = "getrandom"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427"
dependencies = [
"cfg-if",
"js-sys",
"libc",
"wasi 0.11.0+wasi-snapshot-preview1",
"wasm-bindgen",
]
[[package]]
name = "gimli"
version = "0.27.3"
@ -1891,7 +1904,9 @@ dependencies = [
"enum-iterator",
"env_logger",
"gloo 0.10.0",
"gloo-events 0.2.0",
"include_dir",
"js-sys",
"log",
"macros",
"model",
@ -1902,6 +1917,7 @@ dependencies = [
"thiserror",
"time 0.3.26",
"tokio",
"uuid",
"wasm-bindgen",
"wasm-bindgen-futures",
"wasm-logger",
@ -2295,6 +2311,17 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1"
[[package]]
name = "uuid"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d"
dependencies = [
"getrandom",
"serde",
"wasm-bindgen",
]
[[package]]
name = "vcpkg"
version = "0.2.15"

View File

@ -36,13 +36,17 @@ time = { version = "0.3", features = ["formatting"] }
async-trait = { version = "0.1" }
enum-iterator = { version = "1.4" }
gloo = { version = "0.10" }
gloo-events = { version = "0.2" }
include_dir = { version = "0.7" }
js-sys = { version = "0.3" }
log = { version = "0.4" }
reqwest = { version = "0.11", features = ["json"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0" }
thiserror = { version = "1.0" }
uuid = { version = "1.4", features = ["js", "serde"] }
wasm-bindgen = { version = "0.2" }
wasm-bindgen-futures = { version = "0.4" }
yew = { version = "0.20" }
yew-hooks = { version = "0.2" }
yew-router = { version = "0.17" }
@ -78,6 +82,7 @@ features = [
"LucideList",
"LucideMenu",
"LucidePencil",
"LucideRefreshCw",
"LucideRss",
"LucideX"
]
@ -85,11 +90,15 @@ features = [
[dependencies.web-sys]
version = "0.3"
features = [
"Blob",
"Document",
"DomRect",
"Element",
"HtmlSelectElement",
"IntersectionObserver",
"IntersectionObserverEntry",
"Navigator",
"Screen",
"ScrollBehavior",
"ScrollToOptions",
"Window"

View File

@ -1,3 +1,10 @@
[watch]
ignore = [
"analytics",
"cf",
"media"
]
[[hooks]]
stage = "pre_build"
command = "bash"

1
analytics/lambda/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/target

2622
analytics/lambda/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,38 @@
[package]
name = "analytics-lambda"
version = "0.1.0"
edition = "2021"
publish = false
[features]
local = []
[dependencies]
async-trait = { version = "0.1" }
env_logger = { version = "0.10" }
fernet = { version = "0.2" }
lambda_runtime = "0.8"
log = { version = "0.4" }
openssl = { version = "0.10", features = ["vendored"] }
poem = { version = "1.3" }
poem-lambda = { version = "1.3" }
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0" }
time = { version = "0.3", features = ["formatting", "serde"] }
tokio = { version = "1", features = ["full"] }
toml = { version = "0.8" }
tracing = { version = "0.1" }
tracing-subscriber = { version = "0.3", features = ["std", "env-filter", "tracing-log"] }
uuid = { version = "1.2", features = ["v4", "serde"] }
analytics-model = { path = "../model" }
[dependencies.sqlx]
version = "0.7"
features = [
"migrate",
"postgres",
"runtime-tokio-rustls",
"time",
"uuid"
]

26
analytics/lambda/local.sh Executable file
View File

@ -0,0 +1,26 @@
#!/bin/bash
#
# Run the analytics lambda locally against a throwaway Postgres container.

set -eo pipefail

DB_CONTAINER_NAME="blakerain-analytics-db"
DB_CONTAINER_PORT=5101

# Stop the database docker container (if it is already running).
docker stop "$DB_CONTAINER_NAME" || true

# Start the local database, passing in defaults that correspond to those in 'local.toml'
# configuration file.
docker run --rm --name "$DB_CONTAINER_NAME" -d \
  -e POSTGRES_USER=analytics_local \
  -e POSTGRES_PASSWORD=analytics_local \
  -e POSTGRES_DB=analytics_local \
  -p $DB_CONTAINER_PORT:5432 \
  postgres:alpine \
  -c log_statement=all

# Make sure that 'cargo watch' is installed
cargo install cargo-watch

# Run the lambda function, reloading on any changes.
cargo watch -B 1 -L debug -- cargo run --features local --bin analytics

View File

@ -0,0 +1,10 @@
[db]
endpoint = "localhost"
port = 5101
username = "analytics_local"
password = "analytics_local"
dbname = "analytics_local"
[auth]
# This is a very poor key, but it shouldn't trigger GitHub
token_key = "YWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWE="

View File

@ -0,0 +1,61 @@
use analytics_lambda::{
config::{load_from_env, load_from_file},
endpoints::auth::AuthContext,
env::Env,
handlers::{
auth::{new_password, signin, validate_token},
page_view::{append_page_view, record_page_view},
query::query_month_view,
},
};
use analytics_model::MIGRATOR;
use lambda_runtime::Error;
use poem::{get, middleware, post, Endpoint, EndpointExt, Route};
/// Build the Poem endpoint that serves the analytics API.
///
/// Loads configuration (from `local.toml` when built with the `local`
/// feature, otherwise from environment variables), connects to the
/// database, applies pending migrations, and assembles the routes with
/// authentication, CORS and tracing middleware.
async fn create() -> Result<impl Endpoint, Error> {
    // `local` builds read ./local.toml; deployed builds read env vars.
    let config = if cfg!(feature = "local") {
        load_from_file()
    } else {
        load_from_env().await
    }?;

    let env = Env::new(config).await;

    // Apply any pending database migrations before serving requests.
    MIGRATOR.run(&env.pool).await?;

    Ok(Route::new()
        .at("/page_view", post(record_page_view))
        .at("/page_view/:id", post(append_page_view))
        .at("/auth/sign_in", post(signin))
        .at("/auth/new_password", post(new_password))
        .at("/auth/validate", post(validate_token))
        .at("/query/month/:year/:month", get(query_month_view))
        // AuthContext takes *skip* prefixes: "/auth" and "/page_view" are
        // served without a bearer token; only the query routes require one.
        .with(AuthContext::new(&["/auth", "/page_view"], env.clone()))
        .with(middleware::Cors::new())
        .with(middleware::Tracing)
        .data(env))
}
/// Entry point: configure tracing, then serve the API either as a local
/// HTTP server (with the `local` feature) or as an AWS Lambda handler.
#[tokio::main]
async fn main() -> Result<(), Error> {
    // Default the log filter to INFO unless overridden via the environment.
    let filter_layer = tracing_subscriber::filter::EnvFilter::builder()
        .with_default_directive(tracing_subscriber::filter::LevelFilter::INFO.into())
        .from_env_lossy();

    // Timestamps are omitted; ANSI colour only for local development runs.
    tracing_subscriber::fmt()
        .with_env_filter(filter_layer)
        .without_time()
        .with_ansi(cfg!(feature = "local"))
        .init();

    let endpoint = create().await?;

    if cfg!(feature = "local") {
        // Local development: plain HTTP server on port 3000.
        poem::Server::new(poem::listener::TcpListener::bind("127.0.0.1:3000"))
            .run(endpoint)
            .await?;
    } else {
        // Deployed: run inside the AWS Lambda runtime via poem-lambda.
        poem_lambda::run(endpoint).await?;
    }

    Ok(())
}

View File

@ -0,0 +1,72 @@
use analytics_lambda::{
config::{load_from_env, load_from_file},
env::Env,
};
use analytics_model::MIGRATOR;
use lambda_runtime::{run, service_fn, Error, LambdaEvent};
use serde::Deserialize;
use sqlx::PgPool;
/// Destructively reset the database by recreating the `public` schema and
/// restoring the default grants for the `analytics` and `public` roles.
async fn destroy(pool: &PgPool) -> sqlx::Result<()> {
    // Executed strictly in order: drop, recreate, then re-grant.
    const STATEMENTS: [&str; 4] = [
        "DROP SCHEMA public CASCADE",
        "CREATE SCHEMA public",
        "GRANT ALL ON SCHEMA public TO analytics",
        "GRANT ALL ON SCHEMA public TO public",
    ];

    for statement in STATEMENTS {
        sqlx::query(statement).execute(pool).await?;
    }

    Ok(())
}
/// Options accepted in the migration Lambda's invocation payload.
#[derive(Deserialize)]
struct Options {
    // When true, drop and recreate the `public` schema before migrating.
    #[serde(default)]
    destroy: bool,
}
/// Entry point for the migration Lambda: applies database migrations on
/// each invocation, optionally destroying the schema first.
#[tokio::main]
async fn main() -> Result<(), Error> {
    // Default the log filter to INFO unless overridden via the environment.
    let filter_layer = tracing_subscriber::filter::EnvFilter::builder()
        .with_default_directive(tracing_subscriber::filter::LevelFilter::INFO.into())
        .from_env_lossy();

    tracing_subscriber::fmt()
        .with_env_filter(filter_layer)
        .without_time()
        .with_ansi(cfg!(feature = "local"))
        .init();

    run(service_fn(
        move |event: LambdaEvent<serde_json::Value>| async move {
            // A malformed payload aborts the invocation with a panic.
            let options: Options = serde_json::from_value(event.payload).expect("options");

            // `local` builds read ./local.toml; deployed builds read env vars.
            let config = if cfg!(feature = "local") {
                load_from_file()
            } else {
                load_from_env().await
            }?;

            let pool = Env::create_pool(&config).await;

            if options.destroy {
                log::info!("Destroying database");
                destroy(&pool).await?;
            }

            log::info!("Running migrations");
            MIGRATOR.run(&pool).await?;

            Ok::<(), Error>(())
        },
    ))
    .await?;

    Ok(())
}

View File

@ -0,0 +1,61 @@
use std::io::Read;
use fernet::Fernet;
use lambda_runtime::Error;
use serde::Deserialize;
/// Top-level configuration for the analytics lambda.
#[derive(Debug, Deserialize)]
pub struct Config {
    pub db: DbConfig,
    pub auth: AuthConfig,
}

/// PostgreSQL connection settings.
#[derive(Debug, Deserialize)]
pub struct DbConfig {
    pub endpoint: String,
    // Optional port; when absent the driver's default port is used.
    pub port: Option<u16>,
    pub username: String,
    pub password: String,
    pub dbname: String,
}

/// Authentication settings.
#[derive(Debug, Deserialize)]
pub struct AuthConfig {
    // Base64-encoded Fernet key used to encrypt/decrypt bearer tokens.
    pub token_key: String,
}
/// Load configuration from a `local.toml` file in the current directory.
///
/// Used by `local` builds; fails with an error if the file is missing.
pub fn load_from_file() -> Result<Config, Error> {
    log::info!("Loading configuration from 'local.toml'");

    let path = std::env::current_dir()?.join("local.toml");
    if !path.is_file() {
        log::error!("Local configuration file 'local.toml' not found");
        return Err("Missing configuration file".into());
    }

    // Read and parse the TOML file in one step.
    let content = std::fs::read_to_string(&path)?;
    Ok(toml::from_str(&content)?)
}
/// Load configuration from environment variables (deployed builds).
///
/// Requires `DATABASE_ENDPOINT` and `DATABASE_PASSWORD`; the username and
/// database name are fixed to "analytics". When `TOKEN_KEY` is missing a
/// fresh Fernet key is generated instead.
pub async fn load_from_env() -> Result<Config, Error> {
    let endpoint = std::env::var("DATABASE_ENDPOINT")?;
    let password = std::env::var("DATABASE_PASSWORD")?;

    // NOTE(review): falling back to a generated key means tokens issued by
    // one instance stop validating after a restart or on another instance —
    // confirm this fallback is acceptable for production.
    let token_key = std::env::var("TOKEN_KEY").unwrap_or_else(|_| {
        log::info!("Unable to find TOKEN_KEY environment variable; falling back to generated key");
        Fernet::generate_key()
    });

    let db = DbConfig {
        endpoint,
        // No port configured via the environment; the driver default is used.
        port: None,
        username: "analytics".to_string(),
        password,
        dbname: "analytics".to_string(),
    };

    let auth = AuthConfig { token_key };

    Ok(Config { db, auth })
}

View File

@ -0,0 +1,118 @@
use analytics_model::user::User;
use async_trait::async_trait;
use fernet::Fernet;
use poem::{
error::InternalServerError,
http::StatusCode,
web::headers::{self, authorization::Bearer, HeaderMapExt},
Endpoint, Middleware, Request,
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::env::Env;
/// Poem middleware that authenticates requests using a Fernet-encrypted
/// bearer token.
pub struct AuthContext {
    // Path prefixes that bypass authentication entirely.
    skip_prefixes: Vec<String>,
    env: Env,
}

impl AuthContext {
    /// Create the middleware; requests whose path starts with any of the
    /// `skip_prefixes` are passed through without authentication.
    pub fn new(skip_prefixes: &[&str], env: Env) -> Self {
        Self {
            skip_prefixes: skip_prefixes.iter().map(ToString::to_string).collect(),
            env,
        }
    }
}

impl<E: Endpoint> Middleware<E> for AuthContext {
    type Output = AuthEndpoint<E>;

    // Wrap the inner endpoint in an authenticating `AuthEndpoint`.
    fn transform(&self, ep: E) -> Self::Output {
        AuthEndpoint::new(self.skip_prefixes.clone(), self.env.clone(), ep)
    }
}
/// Endpoint produced by the `AuthContext` middleware: authenticates each
/// request before delegating to the wrapped endpoint.
pub struct AuthEndpoint<E: Endpoint> {
    // Path prefixes that bypass authentication (shared with AuthContext).
    skip_prefixes: Vec<String>,
    env: Env,
    endpoint: E,
}

impl<E: Endpoint> AuthEndpoint<E> {
    fn new(skip_prefixes: Vec<String>, env: Env, endpoint: E) -> Self {
        Self {
            skip_prefixes,
            env,
            endpoint,
        }
    }
}
#[async_trait]
impl<E: Endpoint> Endpoint for AuthEndpoint<E> {
    type Output = E::Output;

    /// Authenticate the request, then delegate to the wrapped endpoint.
    ///
    /// Responds 401 when the bearer token is missing, cannot be decrypted,
    /// or names a user that no longer exists; 403 when the user exists but
    /// is disabled. On success the `User` is stored in the request data so
    /// handlers can retrieve it.
    async fn call(&self, mut request: Request) -> poem::Result<Self::Output> {
        // Paths under a skip prefix bypass authentication entirely.
        for skip_prefix in &self.skip_prefixes {
            if request.uri().path().starts_with(skip_prefix) {
                return self.endpoint.call(request).await;
            }
        }

        // Make sure that we have an 'Authorization' header that has a 'Bearer' token.
        let Some(auth) = request.headers().typed_get::<headers::Authorization<Bearer>>() else {
            log::info!("Missing 'Authorization' header with 'Bearer' token");
            return Err(poem::Error::from_status(StatusCode::UNAUTHORIZED));
        };

        // Ensure that we can decrypt the token using the provided Fernet key.
        let Token { user_id } = match Token::decode(&self.env.fernet, auth.token()) {
            Some(token) => token,
            None => {
                log::error!("Failed to decode authentication token");
                return Err(poem::Error::from_status(StatusCode::UNAUTHORIZED));
            }
        };

        // If the user no longer exists, then a simple 401 will suffice.
        let Some(user) = sqlx::query_as::<_, User>("SELECT * FROM users WHERE id = $1")
            .bind(user_id).fetch_optional(&self.env.pool).await.map_err(InternalServerError)? else {
            log::error!("User '{user_id}' no longer exists");
            return Err(poem::Error::from_status(StatusCode::UNAUTHORIZED));
        };

        // Make sure that the user is still enabled.
        if !user.enabled {
            log::error!("User '{user_id}' is not enabled");
            return Err(poem::Error::from_status(StatusCode::FORBIDDEN));
        }

        // Store the authenticated user in the request for retrieval by handlers.
        request.set_data(user);

        self.endpoint.call(request).await
    }
}
/// An authentication token carrying the authenticated user's id.
///
/// Tokens are serialized to JSON and encrypted with a Fernet key; the
/// ciphertext is what clients present as their bearer token.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Token {
    pub user_id: Uuid,
}

impl Token {
    /// Create a token for the given user.
    pub fn new(user_id: Uuid) -> Self {
        Self { user_id }
    }

    /// Serialize this token to JSON and encrypt it with the Fernet key.
    pub fn encode(&self, fernet: &Fernet) -> String {
        let json = serde_json::to_string(self).expect("Unable to JSON encode token");
        fernet.encrypt(json.as_bytes())
    }

    /// Decrypt and parse a token; `None` if decryption or parsing fails.
    pub fn decode(fernet: &Fernet, encoded: &str) -> Option<Self> {
        fernet
            .decrypt(encoded)
            .ok()
            .and_then(|bytes| serde_json::from_slice(&bytes).ok())
    }
}

View File

@ -0,0 +1,65 @@
use std::ops::Deref;
use std::sync::Arc;
use std::time::Duration;
use fernet::Fernet;
use log::LevelFilter;
use sqlx::postgres::PgConnectOptions;
use sqlx::ConnectOptions;
use crate::config::Config;
/// Shared application environment: the database pool and the Fernet key.
///
/// `Env` is a cheap handle around an `Arc`, so it can be cloned freely
/// into middleware and handlers.
pub struct Env {
    inner: Arc<Inner>,
}

impl Clone for Env {
    fn clone(&self) -> Self {
        // Only the Arc is cloned; the pool and key are shared.
        Self {
            inner: Arc::clone(&self.inner),
        }
    }
}

impl Deref for Env {
    type Target = Inner;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

/// The shared state behind `Env`.
pub struct Inner {
    pub pool: sqlx::PgPool,
    pub fernet: Fernet,
}

impl Env {
    /// Build a PostgreSQL connection pool from the configuration.
    ///
    /// Statements are logged at DEBUG, statements slower than one second at
    /// WARN. Panics if the initial connection fails.
    pub async fn create_pool(config: &Config) -> sqlx::PgPool {
        let mut connection_opts = PgConnectOptions::new()
            .host(&config.db.endpoint)
            .username(&config.db.username)
            .password(&config.db.password)
            .database(&config.db.dbname)
            .log_statements(LevelFilter::Debug)
            .log_slow_statements(LevelFilter::Warn, Duration::from_secs(1));

        // Only override the port when configured; otherwise the driver's
        // default is used.
        if let Some(port) = config.db.port {
            connection_opts = connection_opts.port(port);
        }

        sqlx::PgPool::connect_with(connection_opts).await.unwrap()
    }

    /// Create the environment: connect the pool and parse the Fernet key.
    /// Panics if the configured token key is not a valid Fernet key.
    pub async fn new(config: Config) -> Self {
        let pool = Self::create_pool(&config).await;

        let inner = Inner {
            pool,
            fernet: Fernet::new(&config.auth.token_key).expect("valid fernet key"),
        };

        Self {
            inner: Arc::new(inner),
        }
    }
}

View File

@ -0,0 +1,3 @@
pub mod auth;
pub mod page_view;
pub mod query;

View File

@ -0,0 +1,111 @@
use analytics_model::user::{authenticate, reset_password, User};
use poem::{
error::InternalServerError,
handler,
web::{Data, Json},
};
use serde::{Deserialize, Serialize};
use crate::{endpoints::auth::Token, env::Env};
/// Request body for `signin`.
#[derive(Deserialize)]
pub struct SignInBody {
    username: String,
    password: String,
}

/// Response to a sign-in or password-change attempt, tagged by `type`.
#[derive(Serialize)]
#[serde(tag = "type")]
pub enum SignInResponse {
    // Unknown username, wrong password, or disabled account.
    InvalidCredentials,
    // Credentials valid, but the user must set a new password first.
    NewPassword,
    // Signed in; `token` is the encrypted bearer token.
    Successful { token: String },
}

/// Request body for `new_password`.
#[derive(Deserialize)]
pub struct NewPasswordBody {
    username: String,
    #[serde(rename = "oldPassword")]
    old_password: String,
    #[serde(rename = "newPassword")]
    new_password: String,
}

/// Request body for `validate_token`.
#[derive(Deserialize)]
pub struct ValidateTokenBody {
    token: String,
}

/// Response to token validation, tagged by `type`; `Valid` carries a
/// freshly re-encrypted token.
#[derive(Serialize)]
#[serde(tag = "type")]
pub enum ValidateTokenResponse {
    Invalid,
    Valid { token: String },
}
/// Sign a user in with username and password.
///
/// Returns `InvalidCredentials` on any authentication failure,
/// `NewPassword` when the account is flagged for a password reset, and
/// `Successful` with an encrypted token otherwise.
#[handler]
pub async fn signin(
    env: Data<&Env>,
    Json(SignInBody { username, password }): Json<SignInBody>,
) -> poem::Result<Json<SignInResponse>> {
    let Some(user) = authenticate(&env.pool, &username, &password).await.map_err(InternalServerError)? else {
        return Ok(Json(SignInResponse::InvalidCredentials));
    };

    // Users flagged for a reset must change their password before they can
    // receive a token.
    if user.reset_password {
        return Ok(Json(SignInResponse::NewPassword));
    }

    let token = Token::new(user.id);
    let token = token.encode(&env.fernet);
    Ok(Json(SignInResponse::Successful { token }))
}
/// Change a user's password (after authenticating with the old one) and
/// sign them in, returning an encrypted token on success.
#[handler]
pub async fn new_password(
    env: Data<&Env>,
    Json(NewPasswordBody {
        username,
        old_password,
        new_password,
    }): Json<NewPasswordBody>,
) -> poem::Result<Json<SignInResponse>> {
    // The old password must still authenticate before it can be changed.
    let Some(user) = authenticate(&env.pool, &username, &old_password).await.map_err(InternalServerError)? else {
        return Ok(Json(SignInResponse::InvalidCredentials));
    };

    // Store the new password hash and clear the reset flag.
    let Some(user) = reset_password(&env.pool, user.id, new_password).await.map_err(InternalServerError)? else {
        return Ok(Json(SignInResponse::InvalidCredentials));
    };

    let token = Token::new(user.id);
    let token = token.encode(&env.fernet);
    Ok(Json(SignInResponse::Successful { token }))
}
/// Validate a bearer token, returning a freshly encrypted replacement.
///
/// Returns `Invalid` when the token cannot be decrypted, or when the user
/// it names no longer exists or is disabled.
#[handler]
pub async fn validate_token(
    env: Data<&Env>,
    Json(ValidateTokenBody { token }): Json<ValidateTokenBody>,
) -> poem::Result<Json<ValidateTokenResponse>> {
    let Some(Token { user_id }) = Token::decode(&env.fernet, &token) else {
        log::error!("Failed to decode authentication token");
        return Ok(Json(ValidateTokenResponse::Invalid));
    };

    // The user must still exist for the token to be considered valid.
    let Some(user) = sqlx::query_as::<_, User>("SELECT * FROM users WHERE id = $1")
        .bind(user_id).fetch_optional(&env.pool).await.map_err(InternalServerError)? else {
        log::error!("User '{user_id}' no longer exists");
        return Ok(Json(ValidateTokenResponse::Invalid));
    };

    if !user.enabled {
        log::error!("User '{user_id}' is not enabled");
        return Ok(Json(ValidateTokenResponse::Invalid));
    }

    // Re-encrypt a fresh token for the same user.
    let token = Token::new(user.id);
    let token = token.encode(&env.fernet);
    Ok(Json(ValidateTokenResponse::Valid { token }))
}

View File

@ -0,0 +1,92 @@
use analytics_model::view::{self, create_page_view, PageView};
use poem::{
handler,
web::{Data, Json, Path},
};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;
use crate::env::Env;
/// JSON body for recording a page view; field names are abbreviated in the
/// client payload.
#[derive(Deserialize)]
pub struct PageViewBody {
    // Page path; requests without a path are ignored.
    path: Option<String>,
    // User agent string.
    ua: Option<String>,
    // Viewport width / height.
    vw: Option<i32>,
    vh: Option<i32>,
    // Screen width / height.
    sw: Option<i32>,
    sh: Option<i32>,
    // Timezone.
    tz: Option<String>,
    // Referrer.
    rf: Option<String>,
}

/// Response carrying the id of the recorded view (when one was created),
/// which the client uses for follow-up beacon requests.
#[derive(Serialize)]
pub struct PageViewResponse {
    id: Option<Uuid>,
}
/// Record a new page view.
///
/// When `path` is present a `PageView` row is created (with beacon fields
/// zeroed) and its id returned so the client can append duration/scroll
/// data later; otherwise the request is ignored and `id` is null.
#[handler]
pub async fn record_page_view(
    env: Data<&Env>,
    Json(PageViewBody {
        path,
        ua,
        vw,
        vh,
        sw,
        sh,
        tz,
        rf,
    }): Json<PageViewBody>,
) -> poem::Result<Json<PageViewResponse>> {
    let id = if let Some(path) = path {
        let id = Uuid::new_v4();
        let view = PageView {
            id,
            path,
            time: OffsetDateTime::now_utc(),
            user_agent: ua,
            viewport_width: vw,
            viewport_height: vh,
            screen_width: sw,
            screen_height: sh,
            timezone: tz,
            referrer: rf,
            // No beacon data yet; filled in later by `append_page_view`.
            beacon: false,
            duration: None,
            scroll: None,
        };

        // Storage failures are logged but not surfaced to the client.
        if let Err(err) = create_page_view(&env.pool, view).await {
            log::error!("Failed to record page view: {err:?}");
            None
        } else {
            Some(id)
        }
    } else {
        log::info!("Ignoring request for pageview image with no path");
        None
    };

    Ok(Json(PageViewResponse { id }))
}
/// JSON body for appending beacon data to an existing page view.
#[derive(Deserialize)]
pub struct AppendPageViewBody {
    duration: f64,
    scroll: f64,
}

/// Append duration and scroll beacon data to the page view named by `id`.
///
/// Errors are logged and swallowed; the endpoint always responds with an
/// empty JSON body.
#[handler]
pub async fn append_page_view(
    env: Data<&Env>,
    Path(id): Path<Uuid>,
    Json(AppendPageViewBody { duration, scroll }): Json<AppendPageViewBody>,
) -> poem::Result<Json<()>> {
    if let Err(err) = view::append_page_view(&env.pool, id, duration, scroll).await {
        log::error!("Failed to append page view: {err:?}");
    }

    Ok(Json(()))
}

View File

@ -0,0 +1,70 @@
use analytics_model::{user::User, view::PageViewsMonth};
use poem::{
error::InternalServerError,
handler,
web::{Data, Json, Path},
};
use serde::Serialize;
use crate::env::Env;
/// Aggregated counts for a single path (or the whole site when `path` is
/// the empty string).
#[derive(Debug, Clone, sqlx::FromRow, Serialize)]
pub struct PageViewsPathCount {
    pub path: String,
    // Total recorded page views.
    pub count: i64,
    // Number of views that reported beacon data.
    pub beacons: i64,
    // Averages over views that reported beacon data.
    pub avg_duration: f64,
    pub avg_scroll: f64,
}

/// Response for the month query: site-wide totals, per-day rows, and
/// per-path totals.
#[derive(Debug, Clone, Serialize)]
pub struct PageViewsMonthResult {
    pub site: PageViewsPathCount,
    pub views: Vec<PageViewsMonth>,
    pub paths: Vec<PageViewsPathCount>,
}
/// Query the page view statistics for a given month.
///
/// Requires an authenticated user (injected by `AuthContext`). Returns the
/// per-day site-wide rows, the per-path aggregates, and the site-wide
/// aggregate; the empty path is the whole-site accumulator.
#[handler]
pub async fn query_month_view(
    env: Data<&Env>,
    _: Data<&User>,
    Path((year, month)): Path<(i32, i32)>,
) -> poem::Result<Json<PageViewsMonthResult>> {
    // Per-day rows for the whole site (the empty path accumulator).
    let views = sqlx::query_as::<_, PageViewsMonth>(
        "SELECT * FROM page_views_month WHERE path = $1 AND year = $2 AND month = $3 ORDER BY day",
    )
    .bind("")
    .bind(year)
    .bind(month)
    .fetch_all(&env.pool)
    .await
    .map_err(InternalServerError)?;

    // Aggregate per-path totals. NULLIF/COALESCE guard against division by
    // zero: accumulator rows are created with total_beacon = 0, so a path
    // with views but no beacon data would otherwise abort the whole query
    // with a "division by zero" error.
    let mut paths = sqlx::query_as::<_, PageViewsPathCount>(
        "SELECT path,
                SUM(count) AS count,
                SUM(total_beacon) AS beacons,
                COALESCE(SUM(total_duration) / NULLIF(SUM(total_beacon), 0), 0) AS avg_duration,
                COALESCE(SUM(total_scroll) / NULLIF(SUM(total_beacon), 0), 0) AS avg_scroll
         FROM page_views_month WHERE year = $1 AND month = $2 GROUP BY path",
    )
    .bind(year)
    .bind(month)
    .fetch_all(&env.pool)
    .await
    .map_err(InternalServerError)?;

    // Split the site-wide entry (empty path) out of the per-path list,
    // falling back to zeroed totals when the month has no data at all.
    let site = if let Some(index) = paths.iter().position(|count| count.path.is_empty()) {
        paths.swap_remove(index)
    } else {
        PageViewsPathCount {
            path: String::new(),
            count: 0,
            beacons: 0,
            avg_duration: 0.0,
            avg_scroll: 0.0,
        }
    };

    Ok(Json(PageViewsMonthResult { site, views, paths }))
}

View File

@ -0,0 +1,7 @@
// Library root for the analytics lambda crate.
pub mod config;
pub mod env;
pub mod handlers;

/// Poem middleware endpoints (authentication).
pub mod endpoints {
    pub mod auth;
}

1
analytics/model/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/target

1761
analytics/model/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,23 @@
[package]
name = "analytics-model"
version = "0.1.0"
edition = "2021"
publish = false
[dependencies]
log = { version = "0.4" }
pbkdf2 = { version = "0.12", features = ["simple"] }
rand_core = { version = "0.6", features = ["std"] }
serde = { version = "1.0", features = ["derive"] }
time = { version = "0.3", features = ["formatting", "serde"] }
uuid = { version = "1.2", features = ["v4", "serde"] }
[dependencies.sqlx]
version = "0.7"
features = [
"migrate",
"postgres",
"runtime-tokio-rustls",
"time",
"uuid"
]

View File

@ -0,0 +1,61 @@
CREATE TABLE IF NOT EXISTS page_views (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
path TEXT NOT NULL,
time TIMESTAMP WITH TIME ZONE NOT NULL,
user_agent TEXT,
viewport_width INTEGER,
viewport_height INTEGER,
screen_width INTEGER,
screen_height INTEGER,
timezone TEXT,
referrer TEXT,
beacon BOOLEAN NOT NULL,
duration FLOAT8,
scroll FLOAT8
);
CREATE TABLE IF NOT EXISTS page_views_day (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
path TEXT NOT NULL,
year INTEGER NOT NULL,
month INTEGER NOT NULL,
day INTEGER NOT NULL,
hour INTEGER NOT NULL,
count INTEGER NOT NULL,
total_beacon INTEGER NOT NULL,
total_scroll FLOAT8 NOT NULL,
total_duration FLOAT8 NOT NULL,
CONSTRAINT unique_page_views_day
UNIQUE (path, year, month, day, hour)
);
CREATE TABLE IF NOT EXISTS page_views_week (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
path TEXT NOT NULL,
year INTEGER NOT NULL,
week INTEGER NOT NULL,
dow INTEGER NOT NULL,
count INTEGER NOT NULL,
total_beacon INTEGER NOT NULL,
total_scroll FLOAT8 NOT NULL,
total_duration FLOAT8 NOT NULL,
CONSTRAINT unique_page_views_week
UNIQUE (path, year, week, dow)
);
CREATE TABLE IF NOT EXISTS page_views_month (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
path TEXT NOT NULL,
year INTEGER NOT NULL,
month INTEGER NOT NULL,
day INTEGER NOT NULL,
count INTEGER NOT NULL,
total_beacon INTEGER NOT NULL,
total_scroll FLOAT8 NOT NULL,
total_duration FLOAT8 NOT NULL,
CONSTRAINT unique_page_views_month
UNIQUE (path, year, month, day)
);

View File

@ -0,0 +1,14 @@
CREATE TABLE IF NOT EXISTS users (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
username TEXT NOT NULL,
password TEXT NOT NULL,
enabled BOOLEAN NOT NULL,
reset_password BOOLEAN NOT NULL,
CONSTRAINT unique_username
UNIQUE (username)
);
-- Create an intial user that has a temporary password. The password is: admin
INSERT INTO users (username, password, enabled, reset_password)
VALUES('admin', '$pbkdf2-sha256$i=600000,l=32$V62SYtsc1HWC2hV3jbevjg$OrOHoTwo1YPmNrPUnAUy3Vfg4Lrw90mxOTTISVHmjnk', TRUE, TRUE);

View File

@ -0,0 +1,4 @@
pub mod user;
pub mod view;

/// Embedded sqlx migrations, run by the lambdas at startup.
pub static MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!();

View File

@ -0,0 +1,72 @@
use pbkdf2::{
password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString},
Pbkdf2,
};
use rand_core::OsRng;
use serde::Serialize;
use sqlx::PgPool;
use uuid::Uuid;
/// A row from the `users` table.
#[derive(Debug, Clone, sqlx::FromRow, Serialize)]
pub struct User {
    pub id: Uuid,
    pub username: String,
    // Password hash; never serialized into responses.
    #[serde(skip)]
    pub password: String,
    // Disabled users cannot authenticate.
    pub enabled: bool,
    // When true the user must set a new password before signing in.
    pub reset_password: bool,
}
/// Authenticate a user by username and password.
///
/// Returns `Ok(None)` when the username is unknown, the password fails
/// verification, or the account is disabled; `Ok(Some(user))` on success.
/// Database errors surface as `Err`.
pub async fn authenticate(
    pool: &PgPool,
    username: &str,
    password: &str,
) -> sqlx::Result<Option<User>> {
    let user: User = if let Some(user) = sqlx::query_as("SELECT * FROM users WHERE username = $1")
        .bind(username)
        .fetch_optional(pool)
        .await?
    {
        user
    } else {
        log::warn!("User not found with username '{username}'");
        return Ok(None);
    };

    // Verify the supplied password against the stored PBKDF2 hash.
    // NOTE(review): `expect` panics on a malformed stored hash — confirm
    // hashes are only ever written by `reset_password` or the seed data.
    let parsed_hash = PasswordHash::new(&user.password).expect("valid password hash");
    if let Err(err) = Pbkdf2.verify_password(password.as_bytes(), &parsed_hash) {
        log::error!(
            "Incorrect password for user '{username}' ('{}'): {err:?}",
            user.id
        );
        return Ok(None);
    }

    // Disabled accounts cannot sign in, even with valid credentials.
    if !user.enabled {
        log::error!("User '{username}' ('{}') is disabled", user.id);
        return Ok(None);
    }

    Ok(Some(user))
}
/// Hash `new_password` (PBKDF2 with a freshly generated salt), store it for
/// the user with the given `id`, and clear their `reset_password` flag.
///
/// Returns the updated user, or `None` when no user has that id.
pub async fn reset_password(
    pool: &PgPool,
    id: Uuid,
    new_password: String,
) -> sqlx::Result<Option<User>> {
    let salt = SaltString::generate(&mut OsRng);
    let password = Pbkdf2
        .hash_password(new_password.as_bytes(), &salt)
        .expect("valid password hash")
        .to_string();

    sqlx::query_as(
        "UPDATE users SET password = $1, reset_password = FALSE WHERE id = $2 RETURNING *",
    )
    .bind(password)
    .bind(id)
    .fetch_optional(pool)
    .await
}

331
analytics/model/src/view.rs Normal file
View File

@ -0,0 +1,331 @@
use serde::Serialize;
use sqlx::PgPool;
use time::OffsetDateTime;
use uuid::Uuid;
/// A single recorded page view (row in `page_views`).
#[derive(Debug, Clone, sqlx::FromRow)]
pub struct PageView {
    pub id: Uuid,
    pub path: String,
    pub time: OffsetDateTime,
    pub user_agent: Option<String>,
    pub viewport_width: Option<i32>,
    pub viewport_height: Option<i32>,
    pub screen_width: Option<i32>,
    pub screen_height: Option<i32>,
    pub timezone: Option<String>,
    pub referrer: Option<String>,
    // True once beacon data has been received for this view; used to make
    // repeated beacons adjust rather than double-count the accumulators.
    pub beacon: bool,
    pub duration: Option<f64>,
    pub scroll: Option<f64>,
}

/// Per-hour accumulator, keyed by (path, year, month, day, hour).
#[derive(Debug, Clone, sqlx::FromRow, Serialize)]
pub struct PageViewsDay {
    pub id: Uuid,
    pub path: String,
    pub year: i32,
    pub month: i32,
    pub day: i32,
    pub hour: i32,
    pub count: i32,
    pub total_beacon: i32,
    pub total_scroll: f64,
    pub total_duration: f64,
}

/// Per-day-of-week accumulator, keyed by (path, year, week, dow).
#[derive(Debug, Clone, sqlx::FromRow, Serialize)]
pub struct PageViewsWeek {
    pub id: Uuid,
    pub path: String,
    pub year: i32,
    pub week: i32,
    pub dow: i32,
    pub count: i32,
    pub total_beacon: i32,
    pub total_scroll: f64,
    pub total_duration: f64,
}

/// Per-day accumulator, keyed by (path, year, month, day).
#[derive(Debug, Clone, sqlx::FromRow, Serialize)]
pub struct PageViewsMonth {
    pub id: Uuid,
    pub path: String,
    pub year: i32,
    pub month: i32,
    pub day: i32,
    pub count: i32,
    pub total_beacon: i32,
    pub total_scroll: f64,
    pub total_duration: f64,
}
/// Insert a page view row and bump the count accumulators.
///
/// Accumulators are updated twice: once under the view's own path and once
/// under the empty path, which serves as the site-wide total.
pub async fn create_page_view(pool: &PgPool, view: PageView) -> sqlx::Result<()> {
    sqlx::query(
        "INSERT INTO page_views
            (id, path, time, user_agent,
            viewport_width, viewport_height,
            screen_width, screen_height,
            timezone, referrer,
            beacon, duration, scroll)
        VALUES ($1, $2, $3, $4,
            $5, $6,
            $7, $8,
            $9, $10,
            $11, $12, $13)",
    )
    .bind(view.id)
    .bind(&view.path)
    .bind(view.time)
    .bind(view.user_agent)
    .bind(view.viewport_width)
    .bind(view.viewport_height)
    .bind(view.screen_width)
    .bind(view.screen_height)
    .bind(view.timezone)
    .bind(view.referrer)
    .bind(view.beacon)
    .bind(view.duration)
    .bind(view.scroll)
    .execute(pool)
    .await?;

    // Per-path counters, then the site-wide (empty path) counters.
    update_count_accumulators(pool, &view.path, view.time).await?;
    update_count_accumulators(pool, "", view.time).await?;

    Ok(())
}
/// Bump the per-day, per-week and per-month view counters for `path`.
///
/// Fresh rows are inserted with count 1 and zeroed beacon totals; when the
/// row already exists only its count is incremented. Beacon totals are
/// maintained separately by `update_beacon_accumulators`.
async fn update_count_accumulators(
    pool: &PgPool,
    path: &str,
    time: OffsetDateTime,
) -> sqlx::Result<()> {
    // Hourly accumulator, keyed by (path, year, month, day, hour).
    sqlx::query(
        "
        INSERT INTO page_views_day
        (path, year, month, day, hour, count, total_beacon, total_scroll, total_duration)
        VALUES
        ($1, $2, $3, $4, $5, 1, 0, 0, 0)
        ON CONFLICT (path, year, month, day, hour)
        DO UPDATE SET
        count = page_views_day.count + 1
        ",
    )
    .bind(path)
    .bind(time.year())
    .bind(time.month() as i32)
    .bind(time.day() as i32)
    .bind(time.hour() as i32)
    .execute(pool)
    .await?;

    // Day-of-week accumulator, keyed by (path, year, week, dow).
    sqlx::query(
        "
        INSERT INTO page_views_week
        (path, year, week, dow, count, total_beacon, total_scroll, total_duration)
        VALUES
        ($1, $2, $3, $4, 1, 0, 0, 0)
        ON CONFLICT (path, year, week, dow)
        DO UPDATE SET
        count = page_views_week.count + 1
        ",
    )
    .bind(path)
    .bind(time.year())
    .bind(time.iso_week() as i32)
    .bind(time.weekday().number_days_from_sunday() as i32)
    .execute(pool)
    .await?;

    // Daily accumulator, keyed by (path, year, month, day).
    sqlx::query(
        "
        INSERT INTO page_views_month
        (path, year, month, day, count, total_beacon, total_scroll, total_duration)
        VALUES
        ($1, $2, $3, $4, 1, 0, 0, 0)
        ON CONFLICT (path, year, month, day)
        DO UPDATE SET
        count = page_views_month.count + 1
        ",
    )
    .bind(path)
    .bind(time.year())
    .bind(time.month() as i32)
    .bind(time.day() as i32)
    .execute(pool)
    .await?;

    Ok(())
}
/// Values folded into the beacon accumulators.
struct Accumulators {
    // Raw duration/scroll, used when inserting a brand-new accumulator row.
    duration: f64,
    scroll: f64,
    // Deltas applied when the row already exists: count_delta is 1 for a
    // first beacon and 0 for a repeat; the duration/scroll deltas have any
    // previously recorded values subtracted (see `append_page_view`).
    count_delta: i32,
    duration_delta: f64,
    scroll_delta: f64,
}
/// Fold received beacon data into the day/week/month accumulators.
///
/// On first insert a row records one view, one beacon, and the raw
/// scroll/duration values; on conflict the deltas are added instead, so a
/// repeated beacon for the same view adjusts rather than double-counts.
/// Note the placeholder split in each statement: the first two extra
/// placeholders are the raw values for the INSERT arm, the last three are
/// the deltas for the DO UPDATE arm.
async fn update_beacon_accumulators(
    pool: &PgPool,
    path: &str,
    time: OffsetDateTime,
    Accumulators {
        duration,
        scroll,
        count_delta,
        duration_delta,
        scroll_delta,
    }: Accumulators,
) -> sqlx::Result<()> {
    // Hourly accumulator: $6/$7 raw on insert, $8..$10 deltas on conflict.
    sqlx::query(
        "
        INSERT INTO page_views_day
        (path, year, month, day, hour, count, total_beacon, total_scroll, total_duration)
        VALUES
        ($1, $2, $3, $4, $5, 1, 1, $6, $7)
        ON CONFLICT (path, year, month, day, hour)
        DO UPDATE SET
        total_beacon = page_views_day.total_beacon + $8,
        total_scroll = page_views_day.total_scroll + $9,
        total_duration = page_views_day.total_duration + $10
        ",
    )
    .bind(path)
    .bind(time.year())
    .bind(time.month() as i32)
    .bind(time.day() as i32)
    .bind(time.hour() as i32)
    .bind(scroll)
    .bind(duration)
    .bind(count_delta)
    .bind(scroll_delta)
    .bind(duration_delta)
    .execute(pool)
    .await?;

    // Day-of-week accumulator: $5/$6 raw on insert, $7..$9 deltas.
    sqlx::query(
        "
        INSERT INTO page_views_week
        (path, year, week, dow, count, total_beacon, total_scroll, total_duration)
        VALUES
        ($1, $2, $3, $4, 1, 1, $5, $6)
        ON CONFLICT (path, year, week, dow)
        DO UPDATE SET
        total_beacon = page_views_week.total_beacon + $7,
        total_scroll = page_views_week.total_scroll + $8,
        total_duration = page_views_week.total_duration + $9
        ",
    )
    .bind(path)
    .bind(time.year())
    .bind(time.iso_week() as i32)
    .bind(time.weekday().number_days_from_sunday() as i32)
    .bind(scroll)
    .bind(duration)
    .bind(count_delta)
    .bind(scroll_delta)
    .bind(duration_delta)
    .execute(pool)
    .await?;

    // Daily accumulator: $5/$6 raw on insert, $7..$9 deltas.
    sqlx::query(
        "
        INSERT INTO page_views_month
        (path, year, month, day, count, total_beacon, total_scroll, total_duration)
        VALUES
        ($1, $2, $3, $4, 1, 1, $5, $6)
        ON CONFLICT (path, year, month, day)
        DO UPDATE SET
        total_beacon = page_views_month.total_beacon + $7,
        total_scroll = page_views_month.total_scroll + $8,
        total_duration = page_views_month.total_duration + $9
        ",
    )
    .bind(path)
    .bind(time.year())
    .bind(time.month() as i32)
    .bind(time.day() as i32)
    .bind(scroll)
    .bind(duration)
    .bind(count_delta)
    .bind(scroll_delta)
    .bind(duration_delta)
    .execute(pool)
    .await?;

    Ok(())
}
/// Record beacon data (duration and scroll distance) for a page view.
///
/// Unknown ids are ignored. When a beacon was already recorded for this
/// view, the previously stored values are subtracted from the accumulator
/// deltas so repeated beacons update totals rather than double-count.
pub async fn append_page_view(
    pool: &PgPool,
    uuid: Uuid,
    duration: f64,
    scroll: f64,
) -> sqlx::Result<()> {
    let view = match sqlx::query_as::<_, PageView>("SELECT * FROM page_views WHERE id = $1")
        .bind(uuid)
        .fetch_optional(pool)
        .await?
    {
        Some(view) => view,
        None => {
            log::warn!("Ignoring append for page view '{uuid}' which does not exist");
            return Ok(());
        }
    };

    // If the beacon has already been received, we want to subtract the last recorded duration and
    // scroll distance from our totals before we then add the new duration and scroll distance.
    let (count_delta, duration_delta, scroll_delta) = if view.beacon {
        (
            0,
            duration - view.duration.unwrap_or(0.0),
            scroll - view.scroll.unwrap_or(0.0),
        )
    } else {
        (1, duration, scroll)
    };

    // Update the page view record with the received duration and scroll distance, and set the
    // beacon flag so we know we've recorded this beacon data into our accumulators.
    sqlx::query("UPDATE page_views SET duration = $1, scroll = $2, beacon = $3 WHERE id = $4")
        .bind(duration)
        .bind(scroll)
        .bind(true)
        .bind(uuid)
        .execute(pool)
        .await?;

    // Update the accumulated statistics for the page view path, and the site overall.
    update_beacon_accumulators(
        pool,
        &view.path,
        view.time,
        Accumulators {
            duration,
            scroll,
            count_delta,
            duration_delta,
            scroll_delta,
        },
    )
    .await?;

    update_beacon_accumulators(
        pool,
        "",
        view.time,
        Accumulators {
            duration,
            scroll,
            count_delta,
            duration_delta,
            scroll_delta,
        },
    )
    .await?;

    Ok(())
}

312
cf/analytics.yaml Normal file
View File

@ -0,0 +1,312 @@
#
# analytics.yaml
#
# CloudFormation template for site analytics resources.
#
Description: Site analytics
Parameters:
DomainName:
Type: String
Description: The domain name to use
Default: blakerain.com
HostedZoneId:
Type: String
Description: The hosted zone for the domain
Default: Z2C0W1IB1QO9DO
Outputs:
AnalyticsLambdaDeployerAccessKeyId:
Value: !Ref AnalyticsLambdaDeployerAccessKey
AnalyticsLambdaDeployerSecretAccessKey:
Value: !GetAtt AnalyticsLambdaDeployerAccessKey.SecretAccessKey
Resources:
AnalyticsVpc:
Type: AWS::EC2::VPC
Properties:
CidrBlock: 10.0.0.0/16
EnableDnsHostnames: true
EnableDnsSupport: true
AnalyticsSubnet1:
Type: AWS::EC2::Subnet
Properties:
VpcId: !Ref AnalyticsVpc
AvailabilityZone: eu-west-1a
CidrBlock: 10.0.4.0/24
AnalyticsSubnet2:
Type: AWS::EC2::Subnet
Properties:
VpcId: !Ref AnalyticsVpc
AvailabilityZone: eu-west-1b
CidrBlock: 10.0.5.0/24
AnalyticsLambdaSecurityGroup:
Type: AWS::EC2::SecurityGroup
Properties:
VpcId: !Ref AnalyticsVpc
GroupDescription: Lambda security group
AnalyticsDatabaseSecurityGroup:
Type: AWS::EC2::SecurityGroup
Properties:
VpcId: !Ref AnalyticsVpc
GroupDescription: Database security group
SecurityGroupIngress:
- IpProtocol: tcp
FromPort: "5432"
ToPort: "5432"
SourceSecurityGroupId: !Ref AnalyticsLambdaSecurityGroup
Description: Allow inbound PostgreSQL traffic from Lambda functions
AnalyticsDatabaseSubnetGroup:
Type: AWS::RDS::DBSubnetGroup
Properties:
DBSubnetGroupName: analytics_dbsubnet_group
DBSubnetGroupDescription: Analytics database subnet group
SubnetIds:
- !Ref AnalyticsSubnet1
- !Ref AnalyticsSubnet2
AnalyticsDatabase:
Type: AWS::RDS::DBInstance
Properties:
AllocatedStorage: "20"
AutoMinorVersionUpgrade: true
AvailabilityZone: eu-west-1a
BackupRetentionPeriod: 7
DBInstanceClass: db.t4g.micro
DBName: analytics
DBSubnetGroupName: !Ref AnalyticsDatabaseSubnetGroup
Engine: postgres
MasterUsername: analytics
MasterUserPassword: "{{resolve:ssm:analytics_database_password}}"
MaxAllocatedStorage: 250
MultiAZ: false
Port: "5432"
PreferredBackupWindow: "03:00-04:00"
PreferredMaintenanceWindow: "Sun:00:00-Sun:02:00"
PubliclyAccessible: false
VPCSecurityGroups:
- !Ref AnalyticsDatabaseSecurityGroup
AnalyticsLambdaLogGroup:
Type: AWS::Logs::LogGroup
Properties:
RetentionInDays: 365
LogGroupName:
Fn::Join:
- "/"
- - ""
- aws
- lambda
- !Ref AnalyticsLambda
AnalyticsLambdaLoggingPolicy:
Type: AWS::IAM::Policy
Properties:
PolicyName: analytics_lambda_logging_policy
PolicyDocument:
Version: "2012-10-17"
Statement:
- Effect: Allow
Action:
- "logs:CreateLogStream"
- "logs:PutLogEvents"
Resource: !GetAtt AnalyticsLambdaLogGroup.Arn
Roles:
- !Ref AnalyticsLambdaRole
AnalyticsLambdaPolicy:
Type: AWS::IAM::Policy
Properties:
PolicyName: analytics_lambda_policy
PolicyDocument:
Version: "2012-10-17"
Statement:
- Effect: Allow
Action:
- "ec2:CreateNetworkInterface"
- "ec2:DeleteNetworkInterface"
- "ec2:DescribeNetworkInterfaces"
- "ec2:AssignPrivateIpAddresses"
- "ec2:UnassignPrivateIpAddresses"
Resource: "*"
- Effect: Allow
Action:
- "logs:CreateLogGroup"
Resource:
# NOTE(review): was "...${AWS::AccountId}::*" — the doubled colon creates an
# empty ARN segment, so the pattern matched no CloudWatch Logs resources.
Fn::Sub: "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*"
Roles:
- !Ref AnalyticsLambdaRole
AnalyticsLambdaRole:
Type: AWS::IAM::Role
Properties:
RoleName: analytics_lambda_role
AssumeRolePolicyDocument:
Version: "2012-10-17"
Statement:
- Effect: Allow
Principal:
Service: lambda.amazonaws.com
Action: "sts:AssumeRole"
AnalyticsLambda:
Type: AWS::Lambda::Function
Properties:
FunctionName: analytics_lambda
Description: "Site analytics"
Handler: unused
Architectures:
- arm64
MemorySize: 512
Runtime: provided.al2
Timeout: 360
Role: !GetAtt AnalyticsLambdaRole.Arn
Code:
S3Bucket: private.s3.blakerain.com
S3Key: default-function.zip
Environment:
Variables:
RUST_LOG: info
DATABASE_ENDPOINT: !GetAtt AnalyticsDatabase.Endpoint.Address
DATABASE_PASSWORD: "{{resolve:ssm:analytics_database_password}}"
VpcConfig:
SubnetIds:
- !Ref AnalyticsSubnet1
- !Ref AnalyticsSubnet2
SecurityGroupIds:
- !Ref AnalyticsLambdaSecurityGroup
DependsOn:
- AnalyticsLambdaPolicy
AnalyticsLambdaDeployer:
Type: AWS::IAM::User
Properties:
UserName: analytics_lambda_deployer
AnalyticsLambdaDeployerAccessKey:
Type: AWS::IAM::AccessKey
Properties:
UserName: !Ref AnalyticsLambdaDeployer
AnalyticsApi:
Type: AWS::ApiGatewayV2::Api
Properties:
Name: blakerain_analytics_api
Description: Analytics API
ProtocolType: HTTP
AnalyticsApiIntegration:
Type: AWS::ApiGatewayV2::Integration
Properties:
ApiId: !Ref AnalyticsApi
ConnectionType: INTERNET
IntegrationMethod: POST
IntegrationType: AWS_PROXY
TimeoutInMillis: 30000
PayloadFormatVersion: "2.0"
IntegrationUri: !GetAtt AnalyticsLambda.Arn
AnalyticsApiRouteDefault:
Type: AWS::ApiGatewayV2::Route
Properties:
ApiId: !Ref AnalyticsApi
ApiKeyRequired: false
RouteKey: "$default"
Target:
Fn::Join:
- "/"
- - integrations
- !Ref AnalyticsApiIntegration
AnalyticsApiLogGroup:
Type: AWS::Logs::LogGroup
Properties:
RetentionInDays: 365
LogGroupName: "/aws/apigateway/blakerain_analytics_api"
AnalyticsApiStage:
Type: AWS::ApiGatewayV2::Stage
Properties:
ApiId: !Ref AnalyticsApi
StageName: "$default"
AutoDeploy: true
AccessLogSettings:
DestinationArn: !GetAtt AnalyticsApiLogGroup.Arn
Format: '$context.identity.sourceIp - - [$context.requestTime] "$context.httpMethod $context.routeKey $context.protocol" $context.status $context.responseLength $context.requestId'
AnalyticsApiPermission:
Type: AWS::Lambda::Permission
Properties:
Action: "lambda:InvokeFunction"
FunctionName: !GetAtt AnalyticsLambda.Arn
Principal: apigateway.amazonaws.com
SourceArn:
Fn::Join:
- ":"
- - "arn:aws:execute-api"
- !Sub "${AWS::Region}"
- !Sub "${AWS::AccountId}"
- Fn::Join:
- "/"
- - !Ref AnalyticsApi
- "*"
- "$default"
AnalyticsApiDomain:
Type: AWS::ApiGatewayV2::DomainName
Properties:
DomainName:
Fn::Join:
- "."
- - analytics
- !Ref DomainName
DomainNameConfigurations:
- CertificateArn: !Ref AnalyticsApiCertificate
EndpointType: REGIONAL
SecurityPolicy: TLS_1_2
AnalyticsApiDomainMapping:
Type: AWS::ApiGatewayV2::ApiMapping
Properties:
ApiId: !Ref AnalyticsApi
DomainName: !Ref AnalyticsApiDomain
Stage: !Ref AnalyticsApiStage
AnalyticsApiCertificate:
Type: AWS::CertificateManager::Certificate
Properties:
DomainName:
Fn::Join:
- "."
- - analytics
- !Ref DomainName
ValidationMethod: DNS
DomainValidationOptions:
- DomainName:
Fn::Join:
- "."
- - analytics
- !Ref DomainName
HostedZoneId: !Ref HostedZoneId
AnalyticsApiRecordSet:
Type: AWS::Route53::RecordSet
Properties:
HostedZoneId: !Ref HostedZoneId
Name:
Fn::Join:
- "."
- - analytics
- !Ref DomainName
Type: A
AliasTarget:
HostedZoneId: !GetAtt AnalyticsApiDomain.RegionalHostedZoneId
DNSName: !GetAtt AnalyticsApiDomain.RegionalDomainName

41
public/analytics.js Normal file
View File

@ -0,0 +1,41 @@
export function getTimezone() {
  // Report the browser's IANA time zone (e.g. "Europe/London"), or null
  // when the Intl API is unavailable or throws.
  let zone = null;
  try {
    zone = Intl.DateTimeFormat().resolvedOptions().timeZone;
  } catch (err) {
    zone = null;
  }
  return zone;
}
export function getReferrer() {
  // Normalize the referrer: strip the scheme, a leading mobile/www-style
  // subdomain (m., l., ww., www., optionally numbered), and everything from
  // the query string or fragment onwards, keeping host + path.
  //
  // The previous trailing `.replace(/^([^\/]+)$/, "$1")` was a no-op (it
  // replaced the entire match with itself) and has been removed.
  return document.referrer.replace(
    /^https?:\/\/((m|l|w{2,3})([0-9]+)?\.)?([^?#]+)(.*)$/,
    "$4"
  );
}
export function getPosition() {
  // Scroll depth as a percentage of total page height, rounded to the
  // nearest 5% and capped at 100. Falls back to 0 whenever the DOM is not
  // available.
  try {
    const root = window.document.documentElement;
    const page = window.document.body;
    const visibleBottom = root.scrollTop + root.clientHeight;
    const steps = Math.round((100 * visibleBottom) / page.scrollHeight / 5);
    return Math.min(100, 5 * steps);
  } catch {
    return 0;
  }
}
export function sendBeacon(url, body) {
  // POST the pre-serialized JSON payload. `keepalive` allows the request
  // to complete even while the page is being unloaded.
  const request = {
    keepalive: true,
    method: "POST",
    headers: { "content-type": "application/json" },
    body,
  };
  return fetch(url, request);
}

View File

@ -90,6 +90,11 @@ impl Env {
}
async fn render_route(&self, route: Route) -> String {
assert!(
route.shoud_render(),
"Route {route:?} should not be rendered"
);
let head = HeadContext::default();
let render = {
@ -135,7 +140,11 @@ struct RenderRoute {
fn collect_routes() -> Vec<RenderRoute> {
enum_iterator::all::<Route>()
.map(|route| {
.filter_map(|route| {
if !route.should_render() {
return None;
}
let path = route.to_path();
let path = if path == "/" {
PathBuf::from("index.html")
@ -143,7 +152,7 @@ fn collect_routes() -> Vec<RenderRoute> {
PathBuf::from(&path[1..]).with_extension("html")
};
RenderRoute { route, path }
Some(RenderRoute { route, path })
})
.collect()
}

View File

@ -5,7 +5,7 @@ fn main() {
wasm_logger::init(wasm_logger::Config::default());
log::info!(
"blakerain.com {}, {} {} build",
"blakerain.com {}, {} {} build, compiled {}",
env!("CARGO_PKG_VERSION"),
if cfg!(debug_assertions) {
"debug"
@ -16,11 +16,10 @@ fn main() {
"hydration"
} else {
"standard"
}
},
env!("BUILD_TIME")
);
log::info!("Compiled {}", env!("BUILD_TIME"));
let app = yew::Renderer::<App>::new();
#[cfg(feature = "hydration")]
@ -31,7 +30,7 @@ fn main() {
#[cfg(not(feature = "hydration"))]
{
log::info!("Rendering application");
log::info!("Mounting application");
app.render();
}
}

View File

@ -1,5 +1,7 @@
pub mod analytics;
pub mod blog;
pub mod content;
pub mod display;
pub mod head;
pub mod layout;
pub mod render;

404
src/components/analytics.rs Normal file
View File

@ -0,0 +1,404 @@
use std::rc::Rc;
use js_sys::Promise;
use serde::{Deserialize, Serialize};
use time::{Duration, OffsetDateTime};
use uuid::Uuid;
use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
use wasm_bindgen_futures::JsFuture;
use yew::{function_component, html, use_effect_with_deps, use_reducer, Event, Html, Reducible};
use yew_hooks::{
use_async, use_async_with_options, use_event_with_window, UseAsyncHandle, UseAsyncOptions,
};
use yew_router::prelude::use_location;
// JavaScript helpers (public/analytics.js) imported via wasm-bindgen.
#[wasm_bindgen(module = "/public/analytics.js")]
extern "C" {
    // Browser time zone from Intl, or None when unavailable.
    #[wasm_bindgen(js_name = "getTimezone")]
    fn get_timezone() -> Option<String>;
    // Normalized document.referrer (scheme and common subdomains stripped).
    #[wasm_bindgen(js_name = "getReferrer")]
    fn get_referrer() -> String;
    // Scroll depth as a percentage (0-100, in 5% steps).
    #[wasm_bindgen(js_name = "getPosition")]
    fn get_position() -> f64;
    // POST a JSON body with keepalive; `catch` surfaces JS exceptions as Err.
    #[wasm_bindgen(catch, js_name = "sendBeacon")]
    fn send_beacon(url: &str, body: &str) -> Result<Promise, JsValue>;
}

/// Page-view payload sent to the analytics API (populated by `capture`).
#[derive(Serialize)]
struct AnalyticsData {
    /// Normalized pathname (leading slash; trailing slash stripped except "/").
    path: Option<String>,
    /// Browser user-agent string.
    ua: Option<String>,
    /// Viewport width in pixels (window.innerWidth).
    vw: Option<i32>,
    /// Viewport height in pixels (window.innerHeight).
    vh: Option<i32>,
    /// Screen width in pixels.
    sw: Option<i32>,
    /// Screen height in pixels.
    sh: Option<i32>,
    /// IANA time zone name, when the browser reports one.
    tz: Option<String>,
    /// Normalized referrer.
    rf: Option<String>,
}

/// Response from the page-view endpoint: the new view's UUID, if created.
#[derive(Deserialize)]
struct AnalyticsResponse {
    id: Option<Uuid>,
}
/// Quick `f64` → `i32` conversion: truncates toward zero; Rust's float-to-int
/// `as` cast saturates at the integer bounds and maps NaN to 0.
#[inline]
fn quick_f64_to_i32(value: f64) -> i32 {
    value.trunc() as i32
}
/// Whether the browser's Do Not Track preference is enabled ("1" or "yes").
fn should_not_track() -> bool {
    let preference = gloo::utils::window().navigator().do_not_track();
    matches!(preference.as_str(), "1" | "yes")
}
impl AnalyticsData {
    /// Capture the current page-view data from the browser: pathname,
    /// user agent, viewport and screen dimensions, time zone and referrer.
    pub fn capture() -> Self {
        let window = gloo::utils::window();
        // Normalize the pathname: ensure a leading slash, and drop a trailing
        // slash unless the path is just "/".
        let path = if let Ok(mut path) = window.location().pathname() {
            if !path.starts_with('/') {
                path.insert(0, '/')
            }
            if path.len() > 1 && path.ends_with('/') {
                path.pop().expect("pop");
            }
            Some(path)
        } else {
            None
        };
        Self {
            path,
            ua: window.navigator().user_agent().ok(),
            // Viewport dimensions come back as JsValue floats; convert to i32.
            vw: window
                .inner_width()
                .expect("inner_width")
                .as_f64()
                .map(quick_f64_to_i32),
            vh: window
                .inner_height()
                .expect("inner_height")
                .as_f64()
                .map(quick_f64_to_i32),
            sw: window.screen().expect("screen").width().ok(),
            sh: window.screen().expect("screen").height().ok(),
            tz: get_timezone(),
            rf: Some(get_referrer()),
        }
    }
}
/// Client-side record of the page view currently being measured.
#[derive(Clone)]
struct AnalyticsState {
    /// UUID assigned by the server for this view (None until created).
    view_id: Option<Uuid>,
    /// When this page view began (UTC).
    start: OffsetDateTime,
    /// Deepest scroll position seen so far (percentage).
    scroll: f64,
    /// Visibility bookkeeping, so hidden time can be excluded from duration.
    visibility: VisibilityState,
}

/// Page visibility, with bookkeeping for time spent hidden.
#[derive(Clone)]
enum VisibilityState {
    // Visibility has not been observed yet.
    Unknown,
    Visible {
        // Accumulated time the page has spent hidden so far.
        total_hidden: Duration,
    },
    Hidden {
        // Hidden time accumulated before this hidden period began.
        total: Duration,
        // When the current hidden period started.
        start: OffsetDateTime,
    },
}

impl Default for VisibilityState {
    // Start out unknown until the document reports its visibility.
    fn default() -> Self {
        Self::Unknown
    }
}
impl VisibilityState {
    /// Initial state derived from `document.hidden`.
    fn from_document() -> Self {
        let hidden = gloo::utils::window().document().expect("document").hidden();
        if hidden {
            VisibilityState::Hidden {
                total: Duration::new(0, 0),
                start: OffsetDateTime::now_utc(),
            }
        } else {
            VisibilityState::Visible {
                total_hidden: Duration::new(0, 0),
            }
        }
    }

    /// Transition to `Visible`, folding any just-finished hidden period into
    /// the running total. Already-visible states are returned unchanged.
    fn to_visible(&self) -> Self {
        match self {
            Self::Unknown => Self::Visible {
                total_hidden: Duration::new(0, 0),
            },
            Self::Hidden { total, start } => {
                // Close out the hidden period that just ended.
                let hidden = OffsetDateTime::now_utc() - *start;
                let total_hidden = *total + hidden;
                log::info!(
                    "Page is now visible; was hidden for {} second(s) ({} total)",
                    hidden.whole_seconds(),
                    total_hidden.whole_seconds(),
                );
                Self::Visible { total_hidden }
            }
            Self::Visible { .. } => self.clone(),
        }
    }

    /// Transition to `Hidden`, starting a new hidden period now. Already
    /// hidden states are returned unchanged.
    fn to_hidden(&self) -> Self {
        match self {
            Self::Unknown => Self::Hidden {
                total: Duration::new(0, 0),
                start: OffsetDateTime::now_utc(),
            },
            Self::Hidden { .. } => self.clone(),
            Self::Visible {
                total_hidden: hidden,
            } => Self::Hidden {
                total: *hidden,
                start: OffsetDateTime::now_utc(),
            },
        }
    }
}
impl AnalyticsState {
    /// Empty state: no server-side page view yet; the clock starts now.
    fn new() -> Self {
        Self {
            view_id: None,
            start: OffsetDateTime::now_utc(),
            scroll: 0.0,
            visibility: VisibilityState::default(),
        }
    }

    /// Fresh state for a newly created page view `id`: restart the clock and
    /// sample the current scroll depth and document visibility.
    fn new_with_id(id: Uuid) -> Self {
        Self {
            view_id: Some(id),
            start: OffsetDateTime::now_utc(),
            scroll: get_position().clamp(0.0, 100.0),
            visibility: VisibilityState::from_document(),
        }
    }

    /// Total time the page has been hidden, including a still-open hidden
    /// period.
    fn get_total_hidden(&self) -> Duration {
        match self.visibility {
            VisibilityState::Unknown => Duration::seconds(0),
            VisibilityState::Visible {
                total_hidden: hidden,
            } => hidden,
            VisibilityState::Hidden { total, start } => total + (OffsetDateTime::now_utc() - start),
        }
    }

    /// Visible time on the page in seconds: elapsed time minus hidden time,
    /// clamped to [0, 2 hours] and rounded to a whole number of seconds.
    fn get_duration(&self) -> f64 {
        ((OffsetDateTime::now_utc() - self.start) - self.get_total_hidden())
            .abs()
            .clamp(Duration::new(0, 0), Duration::hours(2))
            .as_seconds_f64()
            .round()
    }
}
/// Actions applied to `AnalyticsState` through the reducer.
enum AnalyticsAction {
    // A page view was created server-side with this UUID.
    NewPageView(Uuid),
    // A scroll event reported this depth (percentage).
    SetScroll(f64),
    // Document visibility changed; `true` means the page is now visible.
    VisibilityChanged(bool),
}

impl Reducible for AnalyticsState {
    type Action = AnalyticsAction;

    fn reduce(self: Rc<Self>, action: Self::Action) -> Rc<Self> {
        match action {
            // A new page view resets the state entirely.
            AnalyticsAction::NewPageView(id) => Self::new_with_id(id),
            // Track the deepest scroll position seen so far.
            AnalyticsAction::SetScroll(distance) => Self {
                scroll: self.scroll.max(distance),
                ..(*self).clone()
            },
            AnalyticsAction::VisibilityChanged(visible) => {
                let visibility = if visible {
                    self.visibility.to_visible()
                } else {
                    self.visibility.to_hidden()
                };
                Self {
                    visibility,
                    ..(*self).clone()
                }
            }
        }
        .into()
    }
}

/// Beacon payload: final duration and scroll depth for a page view.
#[derive(Serialize)]
struct AnalyticsBeaconData {
    duration: f64,
    scroll: f64,
}

impl From<&AnalyticsState> for AnalyticsBeaconData {
    fn from(state: &AnalyticsState) -> Self {
        Self {
            duration: state.get_duration(),
            // Guard against a negative scroll value.
            scroll: 0.0_f64.max(state.scroll),
        }
    }
}

impl AnalyticsBeaconData {
    /// Serialize this payload as JSON and POST it to `url` via the JS
    /// `sendBeacon` helper, awaiting completion of the request.
    pub async fn send(&self, url: &str) -> Result<(), JsValue> {
        let body = serde_json::to_string(self).expect("JSON");
        let res = send_beacon(url, &body)?;
        JsFuture::from(res).await?;
        Ok(())
    }
}
/// Base URL of the analytics API, guaranteed to end with a trailing slash.
///
/// The host can be overridden at compile time via the `ANALYTICS_HOST`
/// environment variable; otherwise the production endpoint is used.
pub fn get_analytics_host() -> String {
    let base = std::option_env!("ANALYTICS_HOST").unwrap_or("https://analytics.blakerain.com");
    if base.ends_with('/') {
        base.to_string()
    } else {
        format!("{base}/")
    }
}
/// Invisible component that records page views and reports beacons.
///
/// On mount (and again on every route change) it POSTs a page-view record to
/// the analytics API. While the page is open it tracks scroll depth and
/// visibility; a beacon carrying the final duration and scroll depth is sent
/// on route change, when the document becomes hidden, and on `pagehide`.
/// Nothing is sent when the browser's Do Not Track preference is enabled.
#[function_component(Analytics)]
pub fn analytics() -> Html {
    let host = get_analytics_host();
    let state = use_reducer(AnalyticsState::new);
    let location = use_location();

    // Create a new page-view record on the server; runs automatically on
    // mount and is re-run on each route change (see the effect below).
    let send_analytics: UseAsyncHandle<(), &'static str> = {
        let host = host.clone();
        let state = state.clone();
        use_async_with_options(
            async move {
                if should_not_track() {
                    log::info!("Do Not Track is enabled; analytics will not be sent");
                    return Ok(());
                }
                let data = AnalyticsData::capture();
                let res = reqwest::Client::new()
                    .post(format!("{host}page_view"))
                    .json(&data)
                    .send()
                    .await
                    .map_err(|err| {
                        log::error!("Unable to send analytics data: {err:?}");
                        "Unable to send analytics data"
                    })?;
                let AnalyticsResponse { id } =
                    res.json::<AnalyticsResponse>().await.map_err(|err| {
                        log::error!("Unable to parse analytics response: {err:?}");
                        "Unable to parse analytics response"
                    })?;
                if let Some(id) = id {
                    log::info!(
                        "New page view '{id}' (for '{}')",
                        data.path.unwrap_or_default()
                    );
                    // Reset the local state for the newly created view.
                    state.dispatch(AnalyticsAction::NewPageView(id));
                } else {
                    log::warn!("Analytics record was not created; received no UUID");
                }
                Ok(())
            },
            UseAsyncOptions::enable_auto(),
        )
    };

    // Send the duration/scroll beacon for the current view, if one exists.
    let send_beacon: UseAsyncHandle<(), &'static str> = {
        let host = host.clone();
        let state = state.clone();
        use_async(async move {
            if should_not_track() {
                log::info!("Do Not Track is enabled; analytics beacon will not be sent");
                return Ok(());
            }
            if let Some(id) = state.view_id {
                log::info!("Sending beacon for page view '{id}'");
                AnalyticsBeaconData::from(&*state)
                    .send(&format!("{host}page_view/{id}"))
                    .await
                    .map_err(|err| {
                        log::error!("Failed to send analytics beacon: {err:?}");
                        "Unable to send analytics beacon"
                    })?;
            }
            Ok(())
        })
    };

    {
        // On route change: flush a beacon for the old view, then record the
        // new page view.
        let send_beacon = send_beacon.clone();
        use_effect_with_deps(
            move |_| {
                send_beacon.run();
                send_analytics.run();
            },
            location.map(|loc| loc.path().to_string()),
        )
    }
    {
        // Track the deepest scroll position.
        let state = state.clone();
        use_event_with_window("scroll", move |_: Event| {
            let distance = get_position();
            state.dispatch(AnalyticsAction::SetScroll(distance));
        })
    }
    {
        // Keep visibility bookkeeping up to date; flush a beacon when hidden.
        let state = state.clone();
        let send_beacon = send_beacon.clone();
        use_event_with_window("visibilitychange", move |_: Event| {
            let hidden = gloo::utils::window().document().expect("document").hidden();
            state.dispatch(AnalyticsAction::VisibilityChanged(!hidden));
            if hidden {
                send_beacon.run();
            }
        })
    }
    {
        // Last-chance flush when the page is being unloaded.
        let send_beacon = send_beacon.clone();
        use_event_with_window("pagehide", move |_: Event| {
            send_beacon.run();
        })
    }
    // This component renders nothing.
    html! {}
}

View File

@ -0,0 +1 @@
pub mod bar_chart;

View File

@ -0,0 +1,146 @@
use yew::{classes, function_component, html, use_state, Callback, Html, Properties};
/// Linear mapping from data values onto a vertical pixel extent.
pub struct AxisScale {
    /// Pixel height of the drawable chart area.
    pub height: f32,
    /// Smallest data value on the axis (maps to 0).
    pub min_value: f32,
    /// Largest data value on the axis (maps to `height`).
    pub max_value: f32,
}

impl AxisScale {
    /// Convert `value` into a pixel offset proportional to its position
    /// within the `[min_value, max_value]` range.
    pub fn scale(&self, value: f32) -> f32 {
        let offset = value - self.min_value;
        let span = self.max_value - self.min_value;
        self.height * offset / span
    }
}
/// Properties for the `BarChart` component.
#[derive(Properties, PartialEq)]
pub struct BarChartProps {
    /// X-axis label for each bar; must match `data` in length.
    pub labels: Vec<String>,
    /// Value of each bar.
    pub data: Vec<f32>,
    /// Invoked with the bar index when the pointer enters a bar.
    pub onhover: Option<Callback<usize>>,
    /// Invoked when the pointer leaves a bar.
    pub onleave: Option<Callback<()>>,
}

// Overall SVG viewBox dimensions.
const CHART_WIDTH: f32 = 1000.0;
const CHART_HEIGHT: f32 = 562.0;
// Margin above the chart area.
const TOP_OFFSET: f32 = 40.0;
// Space reserved for the y-axis (left) and x-axis (bottom).
const AXIS_OFFSET_X: f32 = 60.0;
const AXIS_OFFSET_Y: f32 = 40.0;
// Drawable area once the axes and top margin are excluded.
const CHART_AREA_WIDTH: f32 = CHART_WIDTH - AXIS_OFFSET_X;
const CHART_AREA_HEIGHT: f32 = CHART_HEIGHT - (TOP_OFFSET + AXIS_OFFSET_Y);
// Maximum number of graduations on the y-axis.
const AXIS_GRADUATION_COUNT: usize = 15;
/// Render an SVG bar chart with a labelled x-axis and a graduated y-axis.
/// `labels` and `data` must have equal length (debug-asserted). Optional
/// `onhover`/`onleave` callbacks report pointer movement over the bars.
///
/// NOTE(review): empty `data` yields NaN bar widths, and equal min/max
/// values yield zero graduations (division by zero for the step) — confirm
/// callers never pass such data.
#[function_component(BarChart)]
pub fn bar_chart(props: &BarChartProps) -> Html {
    debug_assert_eq!(props.labels.len(), props.data.len());
    // Index of the bar currently under the pointer (for highlight styling).
    let highlight = use_state(|| None::<usize>);

    // Determine the data range so bars can be scaled into the chart area.
    let mut min_value = f32::MAX;
    let mut max_value = f32::MIN;
    for value in &props.data {
        min_value = min_value.min(*value);
        max_value = max_value.max(*value);
    }

    let scale = AxisScale {
        height: CHART_AREA_HEIGHT,
        min_value,
        max_value,
    };

    // Up to AXIS_GRADUATION_COUNT y-axis graduations; fewer for small ranges.
    let graduations =
        ((15f32.min(max_value - min_value)).round() as usize).min(AXIS_GRADUATION_COUNT);
    let graduation_step = CHART_AREA_HEIGHT / graduations as f32;
    let bar_width = CHART_AREA_WIDTH / props.data.len() as f32;

    html! {
        <svg viewBox={format!("0 0 {} {}", CHART_WIDTH, CHART_HEIGHT)}
            xmlns="http://www.w3.org/2000/svg">
            // X axis: baseline plus a tick and centred label under each bar.
            <g transform={format!("translate({}, {})", AXIS_OFFSET_X, CHART_HEIGHT - TOP_OFFSET)}>
                <line x="0" y="0"
                    x2={CHART_AREA_WIDTH.to_string()} y2="0"
                    stroke-width="1"
                    class="stroke-black dark:stroke-white" />
                { for props.labels.iter().enumerate().map(|(index, label)| html! {
                    <g transform={format!("translate({}, 0)", (index as f32 * bar_width) + (0.5 * bar_width))}>
                        <line y2="10" x2="0" class="stroke-black dark:stroke-white" />
                        <text dy="0.71em" y="16" x="0" style="text-anchor: middle" class="fill-black dark:fill-white">
                            {label.clone()}
                        </text>
                    </g>
                })}
            </g>
            // Y axis: vertical line with graduation ticks and values.
            <g transform={format!("translate(0,{})", TOP_OFFSET)}>
                <line
                    x1={AXIS_OFFSET_X.to_string()}
                    y1="0"
                    x2={AXIS_OFFSET_X.to_string()}
                    y2={CHART_AREA_HEIGHT.to_string()}
                    stroke-width="1"
                    class="stroke-black dark:stroke-white" />
                <g transform={format!("translate({}, 0)", AXIS_OFFSET_X)}>
                    { for (0..graduations).map(|index| {
                        // Value at this graduation, from the maximum at the top.
                        let value = scale.max_value -
                            (index as f32 * (scale.max_value - scale.min_value) / graduations as f32);
                        html! {
                            <g transform={format!("translate(0, {})", index as f32 * graduation_step)}>
                                <line x2="-10" y2="0" class="stroke-black dark:stroke-white" />
                                <text dy="0.32em" x="-16" y="0" style="text-anchor: end" class="fill-black dark:fill-white">
                                    {format!("{:.0}", value)}
                                </text>
                            </g>
                        }
                    })}
                </g>
            </g>
            // The bars themselves.
            <g transform={format!("translate(0,{})", TOP_OFFSET)}>
                { for props.data.iter().enumerate().map(|(index, value)| {
                    let onhover = props.onhover.clone();
                    let onleave = props.onleave.clone();
                    // Bars at (or within 0.01 of) the axis minimum would have
                    // effectively zero height; draw nothing for them.
                    if (scale.min_value - value).abs() < 0.01 {
                        return html! {}
                    }
                    html! {
                        <rect
                            x={((index as f32 * bar_width) + AXIS_OFFSET_X).to_string()}
                            y={(CHART_AREA_HEIGHT - scale.scale(*value)).to_string()}
                            width={bar_width.to_string()}
                            height={scale.scale(*value).to_string()}
                            class={
                                classes!(
                                    "cursor-pointer",
                                    if *highlight == Some(index) {
                                        "fill-slate-700 dark:fill-slate-500"
                                    } else {
                                        "fill-slate-800 dark:fill-slate-400"
                                    }
                                )
                            }
                            onmouseover={
                                let highlight = highlight.clone();
                                Callback::from(move |_| {
                                    highlight.set(Some(index));
                                    if let Some(onhover) = &onhover {
                                        onhover.emit(index);
                                    }
                                })
                            }
                            onmouseout={
                                let highlight = highlight.clone();
                                Callback::from(move |_| {
                                    highlight.set(None);
                                    if let Some(onleave) = &onleave {
                                        onleave.emit(());
                                    }
                                })
                            } />
                    }
                })}
            </g>
        </svg>
    }
}

View File

@ -2,6 +2,8 @@ use web_sys::{window, ScrollBehavior, ScrollToOptions};
use yew::{function_component, html, use_effect_with_deps, Children, Html, Properties};
use yew_router::prelude::use_location;
use crate::components::analytics::Analytics;
mod footer;
pub mod goto_top;
pub mod intersperse;
@ -34,6 +36,7 @@ pub fn layout(props: &LayoutProps) -> Html {
<navigation::Navigation />
{props.children.clone()}
<footer::Footer />
<Analytics />
</div>
}
}

View File

@ -40,6 +40,9 @@ pub fn footer(_: &FooterProps) -> Html {
rel="noreferrer">
{"Mastodon"}
</a>
<Link<Route> classes="hover:text-neutral-50" to={Route::AnalyticsRoot}>
{"Analytics"}
</Link<Route>>
</div>
<div>
{"Powered by "}

View File

@ -5,6 +5,7 @@ use yew::{function_component, html, use_memo, Children, ContextProvider, Html, P
macros::tags!("content/tags.yaml");
pub mod analytics;
pub mod blog;
pub mod pages;

31
src/model/analytics.rs Normal file
View File

@ -0,0 +1,31 @@
use serde::Deserialize;
use uuid::Uuid;
/// One day's accumulated page-view statistics for a path (one row per
/// path/year/month/day in the `page_views_month` table).
#[derive(Debug, Clone, Default, Deserialize)]
pub struct PageViewsMonth {
    pub id: Uuid,
    pub path: String,
    pub year: i32,
    pub month: i32,
    pub day: i32,
    /// Number of page views recorded.
    pub count: i32,
    /// Number of beacons folded into the totals.
    pub total_beacon: i32,
    /// Sum of reported scroll depths.
    pub total_scroll: f64,
    /// Sum of reported view durations.
    pub total_duration: f64,
}

/// Aggregate view counts and averages for a single path.
#[derive(Debug, Clone, Default, Deserialize)]
pub struct PageViewsPathCount {
    pub path: String,
    pub count: i64,
    pub beacons: i64,
    pub avg_duration: f64,
    pub avg_scroll: f64,
}

/// Result of a month query: site-wide totals, per-day view rows, and
/// per-path counts.
#[derive(Debug, Clone, Default, Deserialize)]
pub struct PageViewsMonthResult {
    pub site: PageViewsPathCount,
    pub views: Vec<PageViewsMonth>,
    pub paths: Vec<PageViewsPathCount>,
}

View File

@ -1,8 +1,11 @@
use enum_iterator::Sequence;
use yew::{html, Html};
use yew_router::Routable;
use yew_router::{Routable, Switch};
use self::analytics::AnalyticsRoute;
mod about;
mod analytics;
mod blog;
mod blog_post;
mod disclaimer;
@ -21,6 +24,10 @@ pub enum Route {
BlogPost { doc_id: crate::model::blog::DocId },
#[at("/disclaimer")]
Disclaimer,
#[at("/analytics")]
AnalyticsRoot,
#[at("/analytics/*")]
Analytics,
#[not_found]
#[at("/404")]
NotFound,
@ -31,6 +38,10 @@ impl Route {
!matches!(self, Self::Disclaimer)
}
pub fn should_render(&self) -> bool {
!matches!(self, Self::AnalyticsRoot | Self::Analytics)
}
pub fn switch(self) -> Html {
match self {
Self::Home => html! { <home::Page /> },
@ -39,6 +50,9 @@ impl Route {
Self::BlogPost { doc_id } => html! { <blog_post::Page {doc_id} /> },
Self::Disclaimer => html! { <disclaimer::Page /> },
Self::NotFound => html! { <not_found::Page /> },
Self::AnalyticsRoot | Self::Analytics => {
html! { <Switch<AnalyticsRoute> render={AnalyticsRoute::switch} /> }
}
}
}
}

26
src/pages/analytics.rs Normal file
View File

@ -0,0 +1,26 @@
use enum_iterator::Sequence;
use yew::{html, Html};
use yew_router::{prelude::Redirect, Routable};
use super::Route;
mod auth;
mod dashboard;
/// Routes nested under `/analytics`.
#[derive(Debug, Clone, Routable, Sequence, PartialEq)]
pub enum AnalyticsRoute {
    // The analytics dashboard.
    #[at("/analytics")]
    Dashboard,
    // Fallback route; redirects to the site-wide 404 page.
    #[not_found]
    #[at("/analytics/404")]
    NotFound,
}

impl AnalyticsRoute {
    /// Map each analytics route to its page component.
    pub fn switch(self) -> Html {
        match self {
            Self::Dashboard => html! { <dashboard::Page /> },
            Self::NotFound => html! { <Redirect<Route> to={Route::NotFound} /> },
        }
    }
}

509
src/pages/analytics/auth.rs Normal file
View File

@ -0,0 +1,509 @@
use std::rc::Rc;
use gloo::storage::{errors::StorageError, Storage};
use serde::Deserialize;
use wasm_bindgen::JsCast;
use web_sys::{HtmlInputElement, InputEvent, SubmitEvent};
use yew::{
function_component, html, use_reducer, Callback, Children, ContextProvider, Html, Properties,
Reducible, UseReducerHandle,
};
use yew_hooks::{use_async, use_async_with_options, use_interval, UseAsyncHandle, UseAsyncOptions};
use yew_icons::{Icon, IconId};
use crate::components::analytics::get_analytics_host;
/// Authentication status for the analytics dashboard.
#[derive(Debug, PartialEq)]
enum AuthState {
    // There is no authentication information
    Empty,
    // We have a stored authentication token that we want to validate.
    Validating { token: String },
    // We have a valid authentication token.
    Valid { token: String },
}

/// Actions that drive transitions between `AuthState` values.
enum AuthStateAction {
    // Persist the given token and move to `Valid`.
    UseToken(String),
    // Forget any stored token and move to `Empty`.
    Clear,
}

// Local-storage key under which the analytics auth token is persisted.
const STORED_TOKEN_ID: &str = "blakerain-analytics-token";
impl AuthState {
    /// Initial state: validate any token found in local storage, otherwise
    /// start unauthenticated.
    pub fn new() -> Self {
        if let Some(token) = Self::get_stored_token() {
            Self::Validating { token }
        } else {
            Self::Empty
        }
    }

    /// Read the stored token from local storage. Returns `None` when it is
    /// missing or unreadable; an undecodable value is removed from storage.
    pub fn get_stored_token() -> Option<String> {
        match gloo::storage::LocalStorage::get(STORED_TOKEN_ID) {
            Ok(token) => Some(token),
            Err(err) => match err {
                StorageError::KeyNotFound(_) => None,
                StorageError::SerdeError(err) => {
                    // Stored value is corrupt; drop it so we don't keep
                    // failing on every load.
                    log::error!("Failed to deserialize stored authentication token: {err:?}");
                    Self::remove_stored_token();
                    None
                }
                StorageError::JsError(err) => {
                    log::info!("Failed to load stored authentication token: {err:?}");
                    None
                }
            },
        }
    }

    /// Persist the token to local storage.
    pub fn set_stored_token(token: &str) {
        gloo::storage::LocalStorage::set(STORED_TOKEN_ID, token)
            .expect("SessionStorage to be writable")
    }

    /// Remove any persisted token from local storage.
    pub fn remove_stored_token() {
        gloo::storage::LocalStorage::delete(STORED_TOKEN_ID)
    }
}
impl Reducible for AuthState {
    type Action = AuthStateAction;

    fn reduce(self: Rc<Self>, action: Self::Action) -> Rc<Self> {
        match action {
            // A token was obtained (or revalidated): persist it and mark the
            // session as valid.
            AuthStateAction::UseToken(token) => {
                Self::set_stored_token(&token);
                Self::Valid { token }
            }
            // Sign out / invalid token: forget the stored token.
            AuthStateAction::Clear => {
                Self::remove_stored_token();
                Self::Empty
            }
        }
        .into()
    }
}
/// Response from the sign-in endpoint (externally tagged by "type").
#[derive(Deserialize)]
#[serde(tag = "type")]
pub enum SignInResponse {
    InvalidCredentials,
    NewPassword,
    Successful { token: String },
}

/// Response from the token-validation endpoint (tagged by "type"); a valid
/// token comes back regenerated.
#[derive(Deserialize)]
#[serde(tag = "type")]
pub enum ValidateTokenResponse {
    Invalid,
    Valid { token: String },
}

/// Context provided to children once authentication succeeds; holds the
/// current token.
#[derive(Debug, Clone, PartialEq)]
pub struct AuthTokenContext(pub String);

/// Props for `WithAuth`: the content to show once authenticated.
#[derive(Properties, PartialEq)]
pub struct WithAuthProps {
    #[prop_or_default]
    pub children: Children,
}
/// Gate children behind analytics authentication.
///
/// Validates any stored token on mount and re-validates (regenerating the
/// token) every five minutes. Renders the sign-in form when unauthenticated,
/// a progress note while validating, and the children wrapped in an
/// `AuthTokenContext` once a valid token is held.
#[function_component(WithAuth)]
pub fn with_auth(props: &WithAuthProps) -> Html {
    let host = get_analytics_host();
    let state = use_reducer(AuthState::new);

    let submission: UseAsyncHandle<(), &'static str> = {
        // If we have a token already in our state, then we want to validate it automatically.
        let options = if let AuthState::Validating { .. } = *state {
            UseAsyncOptions::enable_auto()
        } else {
            UseAsyncOptions::default()
        };

        let host = host.clone();
        let state = state.clone();
        use_async_with_options(
            async move {
                if let AuthState::Validating { token } = &*state {
                    log::info!("Validating and regenerating authentication token");
                    let res = reqwest::Client::new()
                        .post(format!("{host}auth/validate"))
                        .json(&serde_json::json!({
                            "token": token
                        }))
                        .send()
                        .await
                        .map_err(|err| {
                            log::error!(
                                "Unable to validate analytics authentication token: {err:?}"
                            );
                            "Unable to validate analytics authentication token"
                        })?;
                    let res = res.json::<ValidateTokenResponse>().await.map_err(|err| {
                        log::error!("Unable to parse analytics token validation response: {err:?}");
                        "Unable to parse analytics token validation response"
                    })?;
                    match res {
                        ValidateTokenResponse::Invalid => {
                            log::error!("Stored token was invalid; clearing state");
                            state.dispatch(AuthStateAction::Clear);
                        }
                        ValidateTokenResponse::Valid { token } => {
                            log::info!("Stored token was valid and regenerated");
                            state.dispatch(AuthStateAction::UseToken(token))
                        }
                    }
                } else {
                    log::warn!("No analytics token was present to validate");
                }
                Ok(())
            },
            options,
        )
    };

    // Every five minutes: refresh and revalidate the token.
    use_interval(move || submission.run(), 5 * 60 * 1000);

    match &*state {
        AuthState::Empty => {
            html! {
                <SignIn host={host.clone()} state={state} />
            }
        }
        AuthState::Validating { .. } => {
            html! {
                <div class="container mx-auto my-10">
                    <b class="text-xl font-semibold text-center">{"Validating Authentication ..."}</b>
                </div>
            }
        }
        AuthState::Valid { token } => {
            html! {
                <ContextProvider<AuthTokenContext> context={AuthTokenContext(token.clone())}>
                    {props.children.clone()}
                </ContextProvider<AuthTokenContext>>
            }
        }
    }
}
/// Props for `AuthContainer`: a titled panel with optional status and error
/// text above its children.
#[derive(Properties, PartialEq)]
struct AuthContainerProps {
    title: String,
    message: Option<String>,
    error: Option<String>,
    #[prop_or_default]
    children: Children,
}

/// Shared layout for the authentication screens: a centred column with a
/// title, an optional message, an optional error line, then the children.
#[function_component(AuthContainer)]
fn auth_container(
    AuthContainerProps {
        title,
        message,
        error,
        children,
    }: &AuthContainerProps,
) -> Html {
    html! {
        <div class="container mx-auto my-10 flex flex-row justify-center">
            <div class="flex flex-col gap-4 basis-full md:basis-2/3 lg:basis-1/2 xl:basis-1/3 p-4">
                <div>
                    <h1 class="text-xl font-semibold">{title}</h1>
                    if let Some(message) = message {
                        <h2>{message}</h2>
                    }
                </div>
                if let Some(error) = error {
                    <h2 class="dark:text-red-500 text-red-700">{error}</h2>
                }
                {children.clone()}
            </div>
        </div>
    }
}
/// Props for the `SignIn` form.
#[derive(Properties, PartialEq)]
struct SignInProps {
    pub host: String,
    pub state: UseReducerHandle<AuthState>,
}

/// Local state for the sign-in form.
#[derive(Clone)]
struct SignInState {
    /// A submission is currently in flight.
    processing: bool,
    /// Instructional text shown with the form.
    message: &'static str,
    /// Error text from the last failed attempt, if any.
    error: Option<String>,
    username: String,
    password: String,
    /// Present once the server has requested a password change.
    new_password: Option<String>,
    /// All required fields are non-empty, enabling submission.
    complete: bool,
}

/// Actions applied to `SignInState` through the reducer.
enum SignInStateAction {
    SetProcessing,
    SetError(String),
    SetUsername(String),
    SetPassword(String),
    SetNewPassword(String),
    // Server rejected the credentials; clear the form.
    InvalidCredentials,
    // Server requires a new password to be chosen.
    RequireNewPassword,
}

impl SignInState {
    /// Fresh, empty form state.
    fn new() -> Self {
        Self {
            processing: false,
            message: "Sign in using your username and password",
            error: None,
            username: String::new(),
            password: String::new(),
            new_password: None,
            complete: false,
        }
    }

    /// The form is complete when username and password are non-empty and,
    /// if a new password was requested, it is non-empty too.
    fn is_complete(username: &String, password: &String, new_password: Option<&String>) -> bool {
        !username.is_empty()
            && !password.is_empty()
            && !new_password.map(String::is_empty).unwrap_or(false)
    }
}
impl Reducible for SignInState {
    type Action = SignInStateAction;

    fn reduce(self: Rc<Self>, action: Self::Action) -> Rc<Self> {
        match action {
            SignInStateAction::SetProcessing => Self {
                processing: true,
                ..(*self).clone()
            },
            // NOTE(review): `processing` is left unchanged here — confirm
            // callers reset it separately after a failure.
            SignInStateAction::SetError(error) => Self {
                error: Some(error),
                ..(*self).clone()
            },
            // Field edits recompute `complete` from the would-be new values.
            SignInStateAction::SetUsername(username) => Self {
                complete: Self::is_complete(&username, &self.password, self.new_password.as_ref()),
                username,
                ..(*self).clone()
            },
            SignInStateAction::SetPassword(password) => Self {
                complete: Self::is_complete(&self.username, &password, self.new_password.as_ref()),
                password,
                ..(*self).clone()
            },
            SignInStateAction::SetNewPassword(new_password) => Self {
                complete: Self::is_complete(&self.username, &self.password, Some(&new_password)),
                new_password: Some(new_password),
                ..(*self).clone()
            },
            // Rejected credentials: show an error and clear all the fields
            // (the new-password field is kept visible but emptied).
            SignInStateAction::InvalidCredentials => Self {
                processing: false,
                error: Some("Invalid username or password".to_string()),
                username: String::new(),
                password: String::new(),
                new_password: self.new_password.as_ref().map(|_| String::new()),
                complete: false,
                ..(*self).clone()
            },
            // Server demands a password change: keep the username, clear the
            // password and reveal the new-password field.
            SignInStateAction::RequireNewPassword => Self {
                processing: false,
                message: "Please enter a new password",
                error: None,
                password: String::new(),
                new_password: Some(String::new()),
                complete: false,
                ..(*self).clone()
            },
        }
        .into()
    }
}
/// Username/password sign-in form. Submits to `{host}auth/sign_in`, or to
/// `{host}auth/new_password` once the server has requested a password change.
/// On success the received token is dispatched into the surrounding
/// [`AuthState`] reducer.
#[function_component(SignIn)]
fn sign_in(SignInProps { host, state }: &SignInProps) -> Html {
    let sign_in_state = use_reducer(SignInState::new);

    // Input handlers: forward the current field value into the reducer.
    let username_change = {
        let sign_in_state = sign_in_state.clone();
        Callback::from(move |event: InputEvent| {
            let target = event
                .target()
                .expect("event target")
                .dyn_into::<HtmlInputElement>()
                .expect("input element");
            sign_in_state.dispatch(SignInStateAction::SetUsername(target.value()));
        })
    };

    let password_change = {
        let sign_in_state = sign_in_state.clone();
        Callback::from(move |event: InputEvent| {
            let target = event
                .target()
                .expect("event target")
                .dyn_into::<HtmlInputElement>()
                .expect("input element");
            sign_in_state.dispatch(SignInStateAction::SetPassword(target.value()));
        })
    };

    let new_password_change = {
        let sign_in_state = sign_in_state.clone();
        Callback::from(move |event: InputEvent| {
            let target = event
                .target()
                .expect("event target")
                .dyn_into::<HtmlInputElement>()
                .expect("input element");
            sign_in_state.dispatch(SignInStateAction::SetNewPassword(target.value()));
        })
    };

    let submit: UseAsyncHandle<(), &'static str> = {
        let host = host.clone();
        let state = state.clone();
        let sign_in_state = sign_in_state.clone();

        // The request body reflects the form state captured at this render;
        // once a new password is required, the "new password" payload shape
        // is used instead of the plain credentials.
        let payload = if let Some(new_password) = &sign_in_state.new_password {
            serde_json::json!({
                "username": sign_in_state.username,
                "oldPassword": sign_in_state.password,
                "newPassword": new_password
            })
        } else {
            serde_json::json!({
                "username": sign_in_state.username,
                "password": sign_in_state.password
            })
        };

        use_async(async move {
            // Fix: run the request inside a nested `async` block so that the
            // `?` operators propagate to *this* block rather than returning
            // from the whole `use_async` future. Previously the `?` early
            // returns bypassed the trailing `map_err`, so `SetError` was
            // never dispatched (and the form stayed in its processing state)
            // when the request or the response decoding failed.
            let outcome: Result<(), &'static str> = async {
                let res = reqwest::Client::new()
                    .post(if sign_in_state.new_password.is_some() {
                        format!("{host}auth/new_password")
                    } else {
                        format!("{host}auth/sign_in")
                    })
                    .json(&payload)
                    .send()
                    .await
                    .map_err(|err| {
                        log::error!("Failed to send authentication request: {err:?}");
                        "Error communicating with authentication server"
                    })?;

                let res = res.json::<SignInResponse>().await.map_err(|err| {
                    log::error!("Failed to decode sign in response: {err:?}");
                    "Error communicating with authentication server"
                })?;

                match res {
                    SignInResponse::InvalidCredentials => {
                        sign_in_state.dispatch(SignInStateAction::InvalidCredentials);
                    }
                    SignInResponse::NewPassword => {
                        sign_in_state.dispatch(SignInStateAction::RequireNewPassword);
                    }
                    SignInResponse::Successful { token } => {
                        state.dispatch(AuthStateAction::UseToken(token));
                    }
                }

                Ok(())
            }
            .await;

            // Surface any failure to the user through the form state.
            outcome.map_err(|err| {
                sign_in_state.dispatch(SignInStateAction::SetError(err.to_string()));
                err
            })
        })
    };

    let onsubmit = {
        let sign_in_state = sign_in_state.clone();
        Callback::from(move |event: SubmitEvent| {
            event.prevent_default();
            // Guard against submission via e.g. the Enter key while the form
            // is incomplete (the button is already disabled in that case).
            if !sign_in_state.complete {
                log::error!("Attempt to submit form without completing");
                return;
            }

            sign_in_state.dispatch(SignInStateAction::SetProcessing);
            submit.run()
        })
    };

    html! {
        <form {onsubmit}>
            <AuthContainer
                title="Sign In"
                message={Some(sign_in_state.message.to_string())}
                error={sign_in_state.error.clone()}>
                <div class="flex flex-col">
                    <label>{"Username"}</label>
                    <input
                        type="text"
                        placeholder="username"
                        disabled={sign_in_state.processing}
                        value={sign_in_state.username.clone()}
                        oninput={username_change} />
                </div>
                <div class="flex flex-col">
                    <label>{"Password"}</label>
                    <input
                        type="password"
                        placeholder="password"
                        disabled={sign_in_state.processing}
                        value={sign_in_state.password.clone()}
                        oninput={password_change} />
                </div>
                // Only shown once the server has requested a password change.
                if let Some(new_password) = &sign_in_state.new_password {
                    <div class="flex flex-col">
                        <label>{"New Password"}</label>
                        <input
                            type="password"
                            placeholder="new password"
                            disabled={sign_in_state.processing}
                            value={new_password.clone()}
                            oninput={new_password_change} />
                    </div>
                }
                <button
                    type="submit"
                    class="button mt-4"
                    disabled={!sign_in_state.complete || sign_in_state.processing}>
                    <Icon icon_id={IconId::LucideCheck} />
                    {"Sign In"}
                </button>
            </AuthContainer>
        </form>
    }
}

View File

@ -0,0 +1,271 @@
use time::{Month, OffsetDateTime};
use wasm_bindgen::JsCast;
use yew::{function_component, html, use_context, use_state, Callback, Html, UseStateHandle};
use yew_hooks::{use_async_with_options, UseAsyncHandle, UseAsyncOptions};
use yew_icons::{Icon, IconId};
use crate::{
components::{analytics::get_analytics_host, display::bar_chart::BarChart},
model::analytics::{PageViewsMonth, PageViewsMonthResult},
pages::analytics::auth::{AuthTokenContext, WithAuth},
};
/// Query the analytics API for the page-view statistics of one month.
///
/// Sends a bearer-authenticated GET to `{host}query/month/{year}/{month}` and
/// deserializes the JSON body into a [`PageViewsMonthResult`]. Failures are
/// logged and reported as short, user-displayable error strings.
async fn get_month_views(
    host: &str,
    token: &str,
    year: i32,
    month: i32,
) -> Result<PageViewsMonthResult, &'static str> {
    let url = format!("{host}query/month/{year}/{month}");
    let response = reqwest::Client::new()
        .get(url)
        .header("Authorization", format!("Bearer {token}"))
        .send()
        .await
        .map_err(|err| {
            log::error!("Unable to query analytics API: {err:?}");
            "Unable to query analytics API"
        })?;
    response.json().await.map_err(|err| {
        log::error!("Unable to deserialize response from analytics API: {err:?}");
        "Unable to deserialize response from analytics API"
    })
}
/// Render the daily page-view counts for `month` of `year` as a bar chart.
///
/// `views` may be sparse (assumed ordered by day — the loop only consumes a
/// head entry whose `day` matches the current day): days with no recorded
/// views get a zero-height bar, so the chart always spans the whole month.
/// Hovering a bar reports its index through `bar_hover`; leaving clears it.
fn month_view_chart(
    year: i32,
    month: Month,
    bar_hover: UseStateHandle<Option<usize>>,
    mut views: &[PageViewsMonth],
) -> Html {
    let day_count = time::util::days_in_year_month(year, month) as i32;
    let mut labels = Vec::with_capacity(day_count as usize);
    let mut data = Vec::with_capacity(day_count as usize);

    // Walk the days of the month, consuming `views` from the front whenever
    // its head entry is for the current day; otherwise pad with zero.
    for day in 1..=day_count {
        labels.push(day.to_string());
        if views.first().map_or(false, |view| view.day == day) {
            data.push(views[0].count as f32);
            views = &views[1..];
        } else {
            data.push(0.0);
        }
    }

    debug_assert_eq!(data.len(), day_count as usize);

    let onhover = {
        let bar_hover = bar_hover.clone();
        Callback::from(move |bar_index| bar_hover.set(Some(bar_index)))
    };

    let onleave = {
        let bar_hover = bar_hover.clone();
        Callback::from(move |_| bar_hover.set(None))
    };

    html! {
        <BarChart labels={labels} data={data} {onhover} {onleave} />
    }
}
/// Build the twelve `<option>` elements for the month `<select>`, starting at
/// January, marking `active` as the selected entry.
fn month_select_options(active: Month) -> Html {
    // Walk the calendar via `Month::next`, starting from January.
    std::iter::successors(Some(Month::January), |current| Some(current.next()))
        .take(12)
        .map(|month| {
            html! {
                <option
                    value={month.to_string()}
                    selected={month == active}>
                    {month.to_string()}
                </option>
            }
        })
        .collect::<Html>()
}
/// Inner analytics dashboard: a year input and month selector, a refresh
/// button, a per-day page-views bar chart with a hover detail line, and a
/// per-path summary table (with a site-wide totals row) for the selected
/// month. Must be rendered beneath a provider of [`AuthTokenContext`].
#[function_component(DashboardContent)]
fn dashboard_content() -> Html {
    let now = OffsetDateTime::now_local().expect("local time");
    let host = get_analytics_host();
    // Provided by `WithAuth` higher up the tree; panics if absent.
    let token = use_context::<AuthTokenContext>().expect("AuthTokenContext to be provided");
    // Selected period, defaulting to the current year and month.
    let year = use_state(|| now.year());
    let month = use_state(|| now.month());
    let month_result = use_state(PageViewsMonthResult::default);
    // Index of the chart bar currently under the pointer, if any.
    let bar_hover = use_state(|| None::<usize>);

    // Fetch the selected month's statistics; `enable_auto` runs this once on
    // mount, and the year/month/refresh handlers re-run it on change.
    let load_dashboard: UseAsyncHandle<(), &'static str> = {
        let year = year.clone();
        let month = month.clone();
        let month_result = month_result.clone();
        use_async_with_options(
            async move {
                let mut result = get_month_views(&host, &token.0, *year, (*month) as i32).await?;
                // Order paths by combined views + beacons, descending.
                result.paths.sort_by(|a, b| {
                    let a = a.count + a.beacons;
                    (b.count + b.beacons).cmp(&a)
                });
                month_result.set(result);
                Ok(())
            },
            UseAsyncOptions::enable_auto(),
        )
    };

    let onrefresh = {
        let load_dashboard = load_dashboard.clone();
        Callback::from(move |_| load_dashboard.run())
    };

    // Year input changed: parse it (falling back to the current year on a
    // malformed value) and reload.
    let year_change = {
        let year = year.clone();
        let load_dashboard = load_dashboard.clone();
        Callback::from(move |event: yew::Event| {
            let input = event
                .target()
                .unwrap()
                .dyn_into::<web_sys::HtmlInputElement>()
                .unwrap();
            year.set(input.value().parse().unwrap_or(now.year()));
            load_dashboard.run();
        })
    };

    // Month selector changed: parse the month name (falling back to the
    // current month) and reload.
    let month_change = {
        let month = month.clone();
        let load_dashboard = load_dashboard.clone();
        Callback::from(move |event: yew::Event| {
            let input = event
                .target()
                .unwrap()
                .dyn_into::<web_sys::HtmlSelectElement>()
                .unwrap();
            month.set(input.value().parse().unwrap_or(now.month()));
            load_dashboard.run();
        })
    };

    html! {
        <div class="container mx-auto flex flex-col gap-4 my-10">
            <div class="flex justify-between items-center">
                <div class="flex flex-row items-center gap-2">
                    <h1 class="text-2xl font-semibold">{"Analytics"}</h1>
                    <input
                        type="number"
                        class="w-[8rem]"
                        onchange={year_change}
                        value={(*year).to_string()} />
                    <select onchange={month_change}>
                        {month_select_options(*month)}
                    </select>
                </div>
                <button type="button" class="button" onclick={onrefresh}>
                    <Icon icon_id={IconId::LucideRefreshCw} />
                    {"Refresh"}
                </button>
            </div>
            <div class="grid 2xl:grid-cols-2 gap-4">
                <div>
                    <div class="border border-primary rounded-md pr-4">
                        {month_view_chart(*year, *month, bar_hover.clone(), &month_result.views)}
                    </div>
                    // Detail line for the hovered bar: the bar index maps to
                    // day-of-month `index + 1`.
                    <div class="h-4 text-sm mt-2">
                        if let Some(index) = *bar_hover {
                            if let Some(day) = month_result.views.iter().find(|view| view.day == (index as i32) + 1) {
                                // NOTE(review): the same expression
                                // `total_scroll / total_beacon` is supplied for
                                // BOTH the "avg. duration" and "avg. scroll"
                                // placeholders below — this looks like a
                                // copy-paste slip; the duration figure
                                // presumably should divide a total-duration
                                // field instead. Confirm against the
                                // `PageViewsMonth` model.
                                { format!(
                                    "{:04}-{:02}-{:02}: {} views, {} beacons, {:.2}s avg. duration, {:.2}% avg. scroll",
                                    *year,
                                    *month as u8,
                                    day.day,
                                    day.count,
                                    day.total_beacon,
                                    if day.total_beacon != 0 {
                                        day.total_scroll / day.total_beacon as f64
                                    } else {
                                        0.0
                                    },
                                    if day.total_beacon != 0 {
                                        day.total_scroll / day.total_beacon as f64
                                    } else {
                                        0.0
                                    },
                                ) }
                            }
                        }
                    </div>
                </div>
                <div>
                    <div class="table">
                        <table class="table tight">
                            <thead>
                                <tr>
                                    <th class="left">{"Path"}</th>
                                    <th class="right">{"View Count"}</th>
                                    <th class="right">{"Total Beacons"}</th>
                                    <th class="right">{"Avg. Duration"}</th>
                                    <th class="right">{"Avg. Scroll"}</th>
                                </tr>
                            </thead>
                            <tbody>
                                {for month_result.paths.iter().map(|path| html! {
                                    <tr>
                                        <td><code>{ path.path.clone() }</code></td>
                                        <td class="right">{ path.count.to_string() }</td>
                                        <td class="right">{ path.beacons.to_string() }</td>
                                        <td class="right">
                                            { format!("{:.0} s", path.avg_duration) }
                                        </td>
                                        <td class="right">
                                            { format!("{:.0}%", path.avg_scroll) }
                                        </td>
                                    </tr>
                                })}
                                // Site-wide totals row.
                                <tr class="dark:bg-neutral-800">
                                    <td class="font-bold">{"Total"}</td>
                                    <td class="font-bold right">
                                        { month_result.site.count.to_string() }
                                    </td>
                                    <td class="font-bold right">
                                        { month_result.site.beacons.to_string() }
                                    </td>
                                    <td class="font-bold right">
                                        { format!("{:.0} s", month_result.site.avg_duration) }
                                    </td>
                                    <td class="font-bold right">
                                        { format!("{:.0}%", month_result.site.avg_scroll) }
                                    </td>
                                </tr>
                            </tbody>
                        </table>
                    </div>
                </div>
            </div>
        </div>
    }
}
/// Analytics dashboard page: wraps the dashboard content in [`WithAuth`], so
/// the dashboard only renders once a valid authentication token is available
/// in context.
#[function_component(Page)]
pub fn page() -> Html {
    html! {
        <WithAuth>
            <DashboardContent />
        </WithAuth>
    }
}

View File

@ -6,6 +6,35 @@
@apply dark:bg-zinc-900 dark:text-neutral-200;
}
/* Primary action button: inline-flex pill with primary background, muted
 * disabled palette, focus ring, and animated color transitions. */
.button {
  @apply inline-flex items-center justify-center border border-transparent;
  @apply px-4 py-2;
  @apply rounded-md shadow-sm text-sm text-gray-300 bg-primary;
  @apply disabled:bg-slate-300 dark:disabled:bg-gray-600 dark:disabled:text-gray-400;
  @apply hover:text-white dark:disabled:hover:text-gray-400;
  @apply focus:outline-none focus:ring focus:ring-offset-2 focus:ring-opacity-50 focus:ring-slate-400;
  @apply transition-colors;
  /* Space a leading icon away from the button label. */
  > svg {
    @apply mr-1;
  }
}
/* Shared styling for form controls (selects and text-like inputs): primary
 * border, light/dark palettes, focus ring, and a dimmed disabled state. */
select,
input[type="text"],
input[type="number"],
input[type="password"] {
  @apply border-primary rounded-md;
  @apply text-neutral-800 placeholder:text-neutral-300;
  @apply dark:bg-zinc-800 dark:text-neutral-200 dark:placeholder:text-neutral-700;
  @apply focus:outline-none focus:ring focus:ring-offset-2 focus:ring-opacity-50 focus:ring-slate-400;
  &:disabled {
    @apply text-neutral-500 dark:text-neutral-500;
    @apply dark:bg-zinc-900;
  }
}
.markdown {
@apply flex flex-col;
@apply font-text text-xl print:text-base;
@ -239,60 +268,6 @@
}
}
/* Table styles (nested here under a parent rule that begins above this view).
 * A near-identical copy appears later in the file at a different nesting
 * level — NOTE(review): this span may be the removed side of the diff;
 * confirm before editing. */
/* Horizontally-scrollable wrapper for wide tables. */
div.table {
  @apply flex overflow-x-scroll;
}
/* Base table: full width, collapsed borders, with .left/.right/.center
 * alignment helpers on header and body cells. */
table {
  @apply min-w-full mb-8 border-collapse table-auto;
  thead {
    @apply bg-transparent dark:bg-neutral-800;
    @apply dark:text-white;
    @apply border-b border-neutral-500;
    tr {
      th {
        @apply px-6 py-4;
        &.left {
          @apply text-left;
        }
        &.right {
          @apply text-right;
        }
        &.center {
          @apply text-center;
        }
      }
    }
  }
  tbody {
    tr {
      @apply border-b border-neutral-400 dark:border-neutral-600;
      td {
        @apply whitespace-nowrap px-6 py-4;
        &.left {
          @apply text-left;
        }
        &.right {
          @apply text-right;
        }
        &.center {
          @apply text-center;
        }
      }
    }
  }
}
.callout {
@apply flex flex-col gap-2 rounded-md p-4 text-base mb-8;
@apply print:border-2 print:p-2;
@ -374,6 +349,67 @@
@apply text-violet-600 dark:text-violet-400;
}
}
}
} /* .callout */
} /* .markdown */
/* Horizontally-scrollable wrapper for wide tables. */
div.table {
  @apply flex overflow-x-scroll;
}
/* Base table: full width, collapsed borders, with .left/.right/.center
 * alignment helpers on header and body cells. */
table {
  @apply min-w-full mb-8 border-collapse table-auto;
  thead {
    @apply bg-transparent dark:bg-neutral-800;
    @apply dark:text-white;
    @apply border-b border-neutral-500;
    tr {
      th {
        @apply px-6 py-4;
        &.left {
          @apply text-left;
        }
        &.right {
          @apply text-right;
        }
        &.center {
          @apply text-center;
        }
      }
    }
  } /* thead */
  tbody {
    tr {
      @apply border-b border-neutral-400 dark:border-neutral-600;
      td {
        @apply whitespace-nowrap px-6 py-4;
        &.left {
          @apply text-left;
        }
        &.right {
          @apply text-right;
        }
        &.center {
          @apply text-center;
        }
      }
    }
  } /* tbody */
  /* Compact variant: reduced cell padding (used by the analytics table). */
  &.tight {
    thead tr th,
    tbody tr td {
      @apply px-3 py-2;
    }
  }
} /* table */
}