Introduction
This guide covers production patterns for managing the artist waiting list and monitoring whitepaper downloads. These workflows are designed for admin dashboards, batch processing scripts, and scheduled tasks.
Prerequisites
Waiting List Management
Daily Review Workflow
Process pending applications in FIFO order:
# Daily review: print each pending application so a human reviewer can
# approve/reject it. NOTE(review): assumes get_pending_waitlist()
# returns oldest-first and includes full_name/email/specialties/
# url_portfolio/message/created_at columns -- confirm against its docs.
# Get oldest pending applications
pending <- get_pending_waitlist(status = "pending", limit = 20)
# seq_len() iterates zero times when no applications are pending
for (i in seq_len(nrow(pending))) {
entry <- pending[i, ]
cat(sprintf(
"\n[%d] %s <%s>\n",
i, entry$full_name, entry$email
))
cat(sprintf(" Specialties: %s\n", entry$specialties))
cat(sprintf(" Portfolio: %s\n", entry$url_portfolio))
cat(sprintf(" Message: %s\n", entry$message))
cat(sprintf(" Applied: %s\n", entry$created_at))
# Review logic here...
# Manually approve/reject based on portfolio review
}

Batch Status Updates
Move multiple applications through the workflow:
# Get applications under review
reviewing <- get_pending_waitlist(status = "reviewing")
# Invite approved artists based on criteria
# NOTE(review): str_detect() returns NA for NA specialties, and an NA
# logical index produces an NA id -- verify specialties is never NA,
# or wrap the condition in which().
approved_ids <- reviewing$id[
stringr::str_detect(reviewing$specialties, "Digital|NFT")
]
# Promote each matching application to the "invited" status
for (id in approved_ids) {
update_waitlist_status(id, status = "invited")
message("Invited: ", id)
}

Conversion Pipeline
Convert invited artists to user accounts:
# Get invited artists ready for conversion
invited <- get_pending_waitlist(status = "invited")
for (i in seq_len(nrow(invited))) {
entry <- invited[i, ]
# Each conversion is wrapped individually so one failure does not
# abort the whole batch: errors are downgraded to warnings and the
# loop continues with the next entry.
tryCatch({
user_id <- convert_waitlist_to_user(
waitlist_id = entry$id,
send_invite_email = TRUE
)
message("Converted ", entry$email, " -> ", user_id)
}, error = function(e) {
warning("Failed to convert ", entry$email, ": ", e$message)
})
}

Status Summary Report
Generate a summary of waiting list status:
# Build and print a one-row-per-status count table for the waiting
# list. Each status is queried independently via get_pending_waitlist().
status_levels <- c("pending", "reviewing", "invited", "converted", "rejected")
# vapply() pins the result to one integer per status
status_counts <- vapply(
  status_levels,
  function(status) nrow(get_pending_waitlist(status = status, limit = 1000)),
  integer(1)
)
summary_df <- data.frame(
  status = status_levels,
  count = unname(status_counts),
  stringsAsFactors = FALSE
)
print(summary_df)
# Example output:
# status count
# 1 pending 45
# 2 reviewing 12
# 3 invited 8
# 4 converted 156
# 5 rejected 23

Whitepaper Analytics
Daily Download Report
Generate a daily report of whitepaper activity:
# Print yesterday's whitepaper download report to the console.
# get_whitepaper_stats() is queried for the window [yesterday, today].
today <- Sys.Date()
yesterday <- today - 1
stats <- get_whitepaper_stats(
start_date = yesterday,
end_date = today
)
cat("=== Whitepaper Download Report ===\n")
cat(sprintf("Period: %s to %s\n", yesterday, today))
cat(sprintf("Total Downloads: %d\n", stats$total_downloads))
cat(sprintf("Unique Emails: %d\n", stats$unique_emails))
cat("\nBy User Type:\n")
print(stats$by_user_type)
cat(sprintf("\nAuthenticated: %d\n", stats$authenticated))
cat(sprintf("Anonymous: %d\n", stats$anonymous))

Investor Lead Pipeline
Identify and prioritize investor leads:
# Get recent investor downloads
investor_leads <- get_recent_downloads(
limit = 100,
user_type = "investor"
)
# Filter to unique emails, most recent first
# NOTE(review): duplicated() keeps the FIRST occurrence, so "most
# recent first" only holds if get_recent_downloads() sorts newest
# first -- confirm its ordering.
unique_leads <- investor_leads[!duplicated(investor_leads$email), ]
cat("=== Investor Leads ===\n")
# Show at most the top 10 leads on the console
for (i in seq_len(min(10, nrow(unique_leads)))) {
lead <- unique_leads[i, ]
cat(sprintf(
"%d. %s (downloaded: %s)\n",
i, lead$email, lead$downloaded_at
))
}
# Export to CRM
# Dated file name, e.g. investor_leads_2024-01-31.csv
write.csv(
unique_leads,
file = paste0("investor_leads_", Sys.Date(), ".csv"),
row.names = FALSE
)

Engagement Tracking
Check engagement before investor meetings:
# Summarize a contact's whitepaper download history.
# Returns a list with `engaged`, plus -- when downloads exist -- the
# download count, first/last timestamps, and a human-readable message.
check_investor_engagement <- function(email) {
  downloads <- get_downloads_by_email(email)
  n_downloads <- nrow(downloads)
  # Guard clause: no history on record for this address
  if (n_downloads == 0) {
    return(list(
      engaged = FALSE,
      message = "No whitepaper downloads on record"
    ))
  }
  earliest <- min(downloads$downloaded_at)
  latest <- max(downloads$downloaded_at)
  list(
    engaged = TRUE,
    download_count = n_downloads,
    first_download = earliest,
    last_download = latest,
    message = sprintf(
      "Downloaded %d time(s), last on %s",
      n_downloads,
      latest
    )
  )
}
# Before an investor meeting, pull the prospect's engagement summary
engagement <- check_investor_engagement("partner@vc.com")
print(engagement)

Trend Analysis
Track download trends over time:
# Weekly breakdown for the last 8 weeks: 9 boundary dates yield 8
# [start, next_start - 1] windows.
weeks <- seq(Sys.Date() - 56, Sys.Date(), by = 7)
weekly_stats <- lapply(seq_along(weeks[-length(weeks)]), function(i) {
  stats <- get_whitepaper_stats(
    start_date = weeks[i],
    end_date = weeks[i + 1] - 1
  )
  # Subsetting by user_type yields integer(0) for a week with no
  # investor downloads, which would make data.frame() fail with
  # unequal column lengths -- default the count to 0 instead.
  investor_count <- stats$by_user_type$count[
    stats$by_user_type$user_type == "investor"
  ]
  if (length(investor_count) == 0) {
    investor_count <- 0L
  }
  data.frame(
    week_start = weeks[i],
    total = stats$total_downloads,
    unique = stats$unique_emails,
    investors = investor_count,
    stringsAsFactors = FALSE
  )
})
trend_df <- do.call(rbind, weekly_stats)
print(trend_df)
# Plot trend (if using ggplot2)
# library(ggplot2)
# ggplot(trend_df, aes(x = week_start, y = total)) +
# geom_line() +
# geom_point() +
# labs(title = "Weekly Whitepaper Downloads", y = "Downloads", x = "Week")

Error Handling
Duplicate Email Handling
The waiting list enforces unique emails:
# The waiting list enforces unique emails: on a duplicate,
# create_waitlist_entry() signals a warning rather than an error.
tryCatch({
create_waitlist_entry(
email = "existing@example.com",
full_name = "Test User"
)
}, warning = function(w) {
if (grepl("already exists", w$message)) {
# Email already in waiting list - get existing entry
existing_id <- as.character(w$message) # NOTE(review): this keeps the whole warning text, not a parsed ID -- extract explicitly if an ID is needed
message("Email already registered: ", existing_id)
}
})

Connection Management
For batch operations, reuse connections to avoid overhead:
# Get a single connection for multiple operations
cn <- artcore::dbc("artsite")
tryCatch({
  # Multiple operations with same connection
  pending <- get_pending_waitlist(cn = cn)
  # head() copes with fewer than 5 rows; pending$id[1:5] would
  # produce NA ids (and bogus status updates) on a short list
  for (id in head(pending$id, 5)) {
    update_waitlist_status(id, status = "reviewing", cn = cn)
  }
  # More operations...
  stats <- get_whitepaper_stats(cn = cn)
}, finally = {
  # Always disconnect, even if an operation above errors
  artcore::dbd(cn)
})

Robust Batch Processing
Handle errors gracefully in batch operations:
# Process with error tracking: collect converted/failed emails.
cn <- artcore::dbc("artsite")
results <- list(success = character(0), failed = character(0))
tryCatch({
  invited <- get_pending_waitlist(status = "invited", cn = cn)
  for (i in seq_len(nrow(invited))) {
    entry <- invited[i, ]
    # Classify the outcome here instead of appending to `results`
    # inside the error handler: `<-` inside the handler function
    # only modifies a handler-local copy, so failures were never
    # recorded in the original version.
    ok <- tryCatch({
      convert_waitlist_to_user(
        waitlist_id = entry$id,
        send_invite_email = TRUE,
        cn = cn
      )
      TRUE
    }, error = function(e) {
      warning(sprintf(
        "Failed to convert %s: %s",
        entry$email, e$message
      ))
      FALSE
    })
    if (ok) {
      results$success <- c(results$success, entry$email)
    } else {
      results$failed <- c(results$failed, entry$email)
    }
  }
}, finally = {
  # Release the shared connection even when the batch aborts
  artcore::dbd(cn)
})
# Summary
cat(sprintf(
  "Processed: %d successful, %d failed\n",
  length(results$success),
  length(results$failed)
))

Scheduled Tasks
Cron Job Examples
Daily Pending Review Alert
Save as pending-review-alert.R:
Schedule with cron:
0 9 * * * /usr/bin/Rscript /path/to/pending-review-alert.R

Weekly Analytics Report
Save as weekly-analytics.R:
#!/usr/bin/env Rscript
# Weekly whitepaper analytics report, intended for a Monday cron job.
library(artauth)
end_date <- Sys.Date()
start_date <- end_date - 7
stats <- get_whitepaper_stats(
start_date = start_date,
end_date = end_date
)
# Format report
# NOTE(review): if a week has no investor (or artist) downloads the
# subsets below are length zero and sprintf() returns character(0),
# silently producing no report text -- guard with a default of 0.
report <- sprintf(
"Weekly Whitepaper Report (%s - %s)\n\nDownloads: %d\nUnique: %d\nInvestors: %d\nArtists: %d",
start_date, end_date,
stats$total_downloads,
stats$unique_emails,
stats$by_user_type$count[stats$by_user_type$user_type == "investor"],
stats$by_user_type$count[stats$by_user_type$user_type == "artist"]
)
message(report)
# Email report
# artsend::send_report_email(
# to = "team@artalytics.com",
# subject = "Weekly Whitepaper Analytics",
# body = report
# )

Schedule with cron:
0 8 * * 1 /usr/bin/Rscript /path/to/weekly-analytics.R

Monthly Conversion Pipeline
Save as monthly-conversions.R:
#!/usr/bin/env Rscript
# Monthly job: convert invited artists who have waited more than a week.
library(artauth)
cn <- artcore::dbc("artsite")
tryCatch({
# Get invited artists who have been waiting > 7 days
invited <- get_pending_waitlist(status = "invited", cn = cn)
# NOTE(review): rows with NA invited_at produce NA in this logical
# index and become all-NA rows in the subset -- confirm invited_at
# is always populated when status == "invited".
ready_for_conversion <- invited[
as.Date(invited$invited_at) < Sys.Date() - 7,
]
if (nrow(ready_for_conversion) > 0) {
for (i in seq_len(nrow(ready_for_conversion))) {
entry <- ready_for_conversion[i, ]
# No per-entry tryCatch here: the first failure aborts the run and
# surfaces via cron -- wrap individually if partial progress is
# preferred.
user_id <- convert_waitlist_to_user(
waitlist_id = entry$id,
send_invite_email = TRUE,
cn = cn
)
message(sprintf(
"Converted %s (%s) -> %s",
entry$full_name, entry$email, user_id
))
}
}
}, finally = {
# Always release the connection, even on error
artcore::dbd(cn)
})

Schedule with cron:
0 10 1 * * /usr/bin/Rscript /path/to/monthly-conversions.R

Admin Dashboard
Example Shiny admin dashboard:
library(shiny)
library(DT)
# Three-tab admin UI: waiting list review, whitepaper download stats,
# and investor lead export. Output/input ids here must match the
# server function (pending_table, waitlist_summary, download_stats,
# recent_downloads, investor_table, export_investors, refresh_*).
ui <- fluidPage(
titlePanel("Artalytics Admin Dashboard"),
tabsetPanel(
# Waiting List Tab
tabPanel("Waiting List",
fluidRow(
column(4,
h4("Summary"),
verbatimTextOutput("waitlist_summary")
),
column(8,
actionButton("refresh_waitlist", "Refresh"),
DT::dataTableOutput("pending_table")
)
)
),
# Whitepaper Tab
tabPanel("Whitepaper Downloads",
fluidRow(
column(4,
h4("Statistics"),
verbatimTextOutput("download_stats")
),
column(8,
DT::dataTableOutput("recent_downloads")
)
)
),
# Investor Leads Tab
tabPanel("Investor Leads",
fluidRow(
column(12,
actionButton("refresh_investors", "Refresh"),
DT::dataTableOutput("investor_table"),
downloadButton("export_investors", "Export to CSV")
)
)
)
)
)
# Server logic for the admin dashboard. Data access goes through the
# artauth helpers; refresh buttons invalidate the reactives by being
# read inside them.
server <- function(input, output, session) {
  # Deduplicate downloads by email (keeps first occurrence). Shared
  # by the investor table and the CSV export so they stay in sync.
  unique_investors_df <- function(df) {
    df[!duplicated(df$email), ]
  }
  # Waiting List Data -- re-fetched whenever the refresh button fires
  waitlist_data <- reactive({
    input$refresh_waitlist
    get_pending_waitlist(limit = 100)
  })
  output$pending_table <- DT::renderDataTable({
    waitlist_data()[, c("full_name", "email", "status", "created_at")]
  })
  output$waitlist_summary <- renderPrint({
    statuses <- c("pending", "reviewing", "invited", "converted", "rejected")
    # vapply() pins the result to a named integer vector (sapply()'s
    # return type varies); also avoids shadowing base::summary
    counts <- vapply(statuses, function(s) {
      nrow(get_pending_waitlist(status = s, limit = 1000))
    }, integer(1))
    counts
  })
  # Whitepaper Downloads
  output$download_stats <- renderPrint({
    get_whitepaper_stats()
  })
  output$recent_downloads <- DT::renderDataTable({
    recent <- get_recent_downloads(limit = 50)
    recent[, c("email", "user_type", "downloaded_at")]
  })
  # Investor Leads
  investor_data <- reactive({
    input$refresh_investors
    get_recent_downloads(limit = 100, user_type = "investor")
  })
  output$investor_table <- DT::renderDataTable({
    unique_investors_df(investor_data())[
      , c("email", "downloaded_at", "ip_address")
    ]
  })
  output$export_investors <- downloadHandler(
    filename = function() {
      paste0("investor_leads_", Sys.Date(), ".csv")
    },
    content = function(file) {
      write.csv(unique_investors_df(investor_data()), file, row.names = FALSE)
    }
  )
}
shinyApp(ui, server)

Best Practices
Connection Pooling
For high-traffic applications, consider connection pooling:
# In global.R or app startup
library(pool)
# Pool of PostgreSQL connections built from the ART_* environment
# variables. NOTE(review): release it at shutdown, e.g.
# shiny::onStop(function() pool::poolClose(pool)).
pool <- pool::dbPool(
drv = RPostgres::Postgres(),
host = Sys.getenv("ART_PGHOST_SITE"),
port = Sys.getenv("ART_PGPORT_SITE"),
user = Sys.getenv("ART_PGUSER_SITE"),
password = Sys.getenv("ART_PGPASS_SITE"),
dbname = Sys.getenv("ART_PGDATA_SITE")
)
# Use pool connection in functions
# Note: artauth functions expect artcore connections,
# so use pool for custom queries only

Logging
Add comprehensive logging for production:
library(rdstools)
# Wrap critical operations with logging
# Logs start/progress/success; on error, logs and then re-raises so
# the caller (or cron) still sees the failure.
process_applications <- function() {
rdstools::log_inf("Starting application processing")
tryCatch({
pending <- get_pending_waitlist(status = "pending")
rdstools::log_inf(sprintf("Found %d pending applications", nrow(pending)))
# Process...
rdstools::log_suc("Application processing complete")
}, error = function(e) {
rdstools::log_err(sprintf("Processing failed: %s", e$message))
# Re-raise after logging so failures are not swallowed
stop(e)
})
}

Rate Limiting
For API endpoints exposing waitlist signup:
# In Shiny app or plumber API
library(ratelimitr)
# Limit to 5 signups per IP per hour
rate_limit <- limit_rate(
rate(5, 3600),
by = "ip_address"
)
# Apply to signup endpoint
rate_limit(function(email, full_name, ip) {
create_waitlist_entry(
email = email,
full_name = full_name,
source = "api"
)
})Next Steps
- Get Started Guide - Detailed explanations and workflows
- Quickstart - Quick reference for common tasks
- Function Reference - Complete API documentation
For questions and issues, visit the GitHub repository.
