library(jsonlite)
# Fetch metadata for multiple NFTs with rate limiting and error capture.
#
# Args:
#   token_ids: vector of token ids to fetch.
#   contract:  NFT contract address.
#   chain:     blockchain slug (e.g. "matic").
#   delay:     seconds to sleep between requests (rate limiting).
# Returns: a list with `results` (parsed JSON per token id) and `errors`
#   (error message per failed token id), both keyed by token id.
fetch_nfts_batch <- function(token_ids, contract, chain, delay = 0.5) {
  results <- list()
  errors <- list()
  for (i in seq_along(token_ids)) {
    id <- token_ids[i]
    tryCatch({
      json <- get_os_nft(id, chain, contract)
      results[[as.character(id)]] <- fromJSON(json)
    }, error = function(e) {
      # BUG FIX: plain `<-` inside the handler only modified a local copy
      # of `errors`, so failures were never recorded. `<<-` assigns into
      # the enclosing function's list.
      errors[[as.character(id)]] <<- conditionMessage(e)
      message(sprintf("Error fetching token %s: %s", id, conditionMessage(e)))
    })
    # Rate limiting: wait between requests. Moved out of tryCatch so a
    # failed request still pauses before the next one.
    if (i < length(token_ids)) {
      Sys.sleep(delay)
    }
  }
  list(results = results, errors = errors)
}
# Usage: fetch metadata for the first ten tokens of the collection.
batch <- fetch_nfts_batch(
token_ids = 1:10,
contract = "0x6444522C5aD44285b7856dd3336D09Fb939865F1",
chain = "matic",
delay = 0.5 # 500ms between requests
)
# Check results
length(batch$results) # Successful fetches (parsed metadata keyed by token id)
length(batch$errors) # Failed fetches
Advanced Use Cases
This guide covers complex workflows, performance optimization, and production-ready patterns for using artopensea in real applications.
Batch Operations
Fetching Multiple NFTs Efficiently
When fetching metadata for multiple NFTs, implement rate limiting and error handling:
Key considerations:
- Rate limiting: OpenSea has rate limits. Add delays between requests (0.5-1 second recommended)
- Error handling: Some NFTs may be burned or hidden. Catch errors to avoid stopping the batch
- Progress tracking: For large batches, add progress indicators
Parallel Processing with Rate Limiting
For very large batches, use parallel processing with rate limiting:
library(data.table)
# Split token IDs into fixed-size chunks for sequential batch processing.
chunk_size <- 5
token_ids <- 1:100
# 1-based group index: tokens 1-5 -> group 1, tokens 6-10 -> group 2, ...
group_index <- ceiling(seq_along(token_ids) / chunk_size)
chunks <- split(token_ids, group_index)
# Process each chunk sequentially with delays
# Run each chunk through the batch fetcher, pausing between chunks to stay
# under the API rate limit. Assumes `contract` and `chain` are defined
# earlier in the session — TODO confirm (they are not set in this guide).
all_results <- lapply(chunks, function(ids) {
  fetched <- fetch_nfts_batch(ids, contract, chain, delay = 0.3)
  Sys.sleep(2) # delay between chunks
  fetched$results
})
# Flatten results
nfts <- unlist(all_results, recursive = FALSE)
This approach processes NFTs in batches of 5 with delays, reducing the risk of rate limiting while maintaining reasonable speed.
Cross-Chain NFT Discovery
Querying Multiple Chains
Fetch NFTs owned by an account across multiple blockchains:
# Wallet address to inspect, and the OpenSea chain slugs to query.
account <- "0xa16DCcD55139D5eF5B5Ff776553ef080EB6258fc"
chains <- c("ethereum", "matic", "arbitrum", "base")
# Collect the NFTs an account owns across several chains.
#
# Args:
#   account: wallet address to query.
#   chains:  character vector of OpenSea chain slugs.
# Returns: one data.table of all NFTs with a `chain` column; chains that
#   error or return nothing are skipped.
cross_chain_portfolio <- function(account, chains) {
  portfolio <- list()
  for (chain in chains) {
    tryCatch({
      json <- get_os_account_nfts(account, chain)
      data <- fromJSON(json)
      if (length(data$nfts) > 0) {
        nfts_dt <- data.table::as.data.table(data$nfts)
        # BUG FIX: `nfts_dt[, chain := chain]` is fragile — in a data.table
        # j-expression the RHS `chain` resolves to an existing `chain`
        # column (if the API payload has one) rather than the loop
        # variable. set() takes the value unambiguously from this scope.
        data.table::set(nfts_dt, j = "chain", value = chain)
        portfolio[[chain]] <- nfts_dt
      }
    }, error = function(e) {
      message(sprintf("Error on %s: %s", chain, conditionMessage(e)))
    })
    # Rate limiting: pause even after a failed request (previously the
    # sleep sat inside tryCatch and was skipped on error).
    Sys.sleep(1)
  }
  # Combine all chains; fill = TRUE tolerates differing column sets
  data.table::rbindlist(portfolio, fill = TRUE)
}
# Get cross-chain portfolio (one API call per chain, paced ~1s apart)
all_nfts <- cross_chain_portfolio(account, chains)
# Analyze by chain
all_nfts[, .N, by = chain]
Building a Multi-Chain Gallery
Create a Shiny app showing NFTs from all chains:
library(shiny)
library(data.table)
# UI: a title, a chain filter dropdown, and a server-rendered gallery.
chain_filter_choices <- c("All", "ethereum", "matic", "arbitrum", "base")
ui <- fluidPage(
  titlePanel("Multi-Chain NFT Gallery"),
  selectInput(
    "chain_filter",
    "Filter by Chain",
    choices = chain_filter_choices
  ),
  uiOutput("gallery")
)
server <- function(input, output, session) {
# Load portfolio on startup. The reactive has no reactive dependencies,
# so it fetches once (lazily, on first use) per session; relies on the
# globals `account` and `chains` defined earlier — TODO confirm in-scope.
portfolio <- reactive({
cross_chain_portfolio(account, chains)
})
output$gallery <- renderUI({
data <- portfolio()
# Filter by selected chain ("All" keeps every row)
if (input$chain_filter != "All") {
data <- data[chain == input$chain_filter]
}
# Generate NFT cards, capped at 20.
# NOTE(review): 1:min(20, nrow(data)) evaluates to c(1, 0) when the
# filtered table has zero rows — seq_len(min(20, nrow(data))) would be
# safer and avoids a render error on an empty selection.
cards <- lapply(1:min(20, nrow(data)), function(i) {
nft <- data[i]
div(
style = "display: inline-block; margin: 10px; width: 200px;",
img(src = nft$image_url, width = "100%"),
h4(nft$name),
p(paste("Chain:", nft$chain)),
getOpenseaBadge(
contract = nft$contract,
id = nft$identifier,
chain = nft$chain,
width = "120px"
)
)
})
tagList(cards)
})
}Working with Large Portfolios
Implementing Pagination
For accounts with >200 NFTs, implement cursor-based pagination:
# Conceptual cursor-based pagination loop for large accounts.
# NOTE(review): `next_cursor` is captured but never passed back to
# get_os_account_nfts(), so each iteration re-fetches the SAME first page
# and `all_nfts` fills with duplicates until max_pages or a break — this
# only becomes real pagination once a cursor parameter exists.
fetch_all_account_nfts <- function(account, chain, max_pages = 10) {
all_nfts <- list()
page <- 1
next_cursor <- NULL
while (page <= max_pages) {
# Note: The current package functions don't expose cursor parameter
# This is a conceptual example. For production, you'd need to use
# httr2 directly to pass the cursor parameter
json <- get_os_account_nfts(account, chain)
data <- fromJSON(json)
if (length(data$nfts) == 0) break
all_nfts[[page]] <- data.table::as.data.table(data$nfts)
# Check for next page ("next" needs [[-indexing: it is a reserved word)
if (is.null(data[["next"]]) || data[["next"]] == "") break
next_cursor <- data[["next"]]
page <- page + 1
# Rate limiting between pages
Sys.sleep(1)
}
# Combine every collected page into one table
data.table::rbindlist(all_nfts, fill = TRUE)
}Important: The current artopensea package does not expose the next cursor parameter. For full pagination support, you would need to use httr2 directly or submit a feature request to add cursor support to the package functions.
Advanced JSON Parsing
Extracting Nested Trait Data
NFT traits/attributes are often nested. Here’s how to extract and analyze them:
library(data.table)
# Fetch a single NFT and inspect its nested trait data.
nft_json <- get_os_nft(
id = 2,
chain = "matic",
contract = "0x6444522C5aD44285b7856dd3336D09Fb939865F1"
)
nft <- fromJSON(nft_json)
# Extract traits as data.table (assumes the payload nests traits under
# nft$nft$traits — TODO confirm against the API response shape)
traits_dt <- as.data.table(nft$nft$traits)
# Analyze trait distribution: row count per trait_type
traits_dt[, .N, by = trait_type]
traits_dt[trait_type == "Background", value]
Building Trait Frequency Tables
For a collection, analyze trait rarity:
# Fetch every NFT in a collection and tabulate how often each
# (trait_type, value) pair occurs, most common first.
analyze_collection_traits <- function(token_ids, contract, chain) {
  batch <- fetch_nfts_batch(token_ids, contract, chain)
  # One traits table per successfully fetched NFT; NULL when an NFT has
  # no traits (rbindlist simply drops NULL entries).
  extract_traits <- function(nft) {
    traits <- nft$nft$traits
    if (is.null(traits)) {
      return(NULL)
    }
    data.table::as.data.table(traits)
  }
  per_nft <- lapply(batch$results, extract_traits)
  combined <- data.table::rbindlist(per_nft, fill = TRUE)
  # Count occurrences of each (trait_type, value) pair, descending
  counts <- combined[, .N, by = .(trait_type, value)]
  counts[order(-N)]
}
# Usage: tabulate trait frequencies for the first 100 tokens
# (one API call per token, so expect this to take a while with rate limits).
trait_analysis <- analyze_collection_traits(
token_ids = 1:100,
contract = "0x6444522C5aD44285b7856dd3336D09Fb939865F1",
chain = "matic"
)
# Find rarest traits
trait_analysis[N == 1] # Unique traits
Performance Optimization
Caching API Responses
Avoid redundant API calls by caching responses:
library(digest)
# Create cache directory under tempdir(): per-session only, so the cache
# vanishes when the session's temp dir is cleaned. Use a persistent path
# if the cache should survive restarts.
cache_dir <- fs::path(tempdir(), "opensea_cache")
fs::dir_create(cache_dir)
# Fetch an NFT's JSON with a simple file cache to avoid repeated API calls.
#
# Args:
#   id, chain, contract: forwarded to get_os_nft().
#   cache_ttl: maximum cache age in seconds before refetching.
#   dir: cache directory (defaults to the global `cache_dir` set above).
# Returns: the JSON payload as a single string.
cached_get_nft <- function(id, chain, contract, cache_ttl = 3600,
                           dir = cache_dir) {
  # Deterministic cache key from the full request identity
  cache_key <- digest::digest(list(id, chain, contract))
  cache_file <- fs::path(dir, paste0(cache_key, ".json"))
  # Serve from cache only when the file exists and is fresh enough
  if (fs::file_exists(cache_file)) {
    file_age <- difftime(Sys.time(), fs::file_info(cache_file)$modification_time,
      units = "secs")
    if (as.numeric(file_age) < cache_ttl) {
      # BUG FIX: readLines() splits on newlines, so a cache hit returned a
      # multi-element character vector whenever the JSON contained
      # newlines, while a cache miss returned one string. Collapse so
      # both paths return the same shape.
      return(paste(readLines(cache_file, warn = FALSE), collapse = "\n"))
    }
  }
  # Cache miss or stale entry: fetch fresh data and rewrite the cache file
  json <- get_os_nft(id, chain, contract)
  writeLines(json, cache_file)
  json
}
# Usage
nft_json <- cached_get_nft(2, "matic", "0x6444522C5aD44285b7856dd3336D09Fb939865F1")
This caches responses for 1 hour (3600 seconds), dramatically reducing API calls for repeated queries.
Preloading NFT Metadata
For Shiny apps, preload metadata on startup rather than fetching on demand:
library(shiny)
# Preload data before app starts. With the default 0.5s delay between
# requests this blocks startup for roughly 25s for 50 tokens.
nft_metadata <- fetch_nfts_batch(
token_ids = 1:50,
contract = "0x6444522C5aD44285b7856dd3336D09Fb939865F1",
chain = "matic"
)$results
# UI: pick a preloaded NFT by token id; details render server-side.
ui <- fluidPage(
titlePanel("Preloaded Gallery"),
selectInput("nft_id", "Select NFT", choices = names(nft_metadata)),
uiOutput("nft_display")
)
server <- function(input, output, session) {
# Render the selected NFT from the preloaded metadata — no API call here.
output$nft_display <- renderUI({
nft <- nft_metadata[[input$nft_id]]
# assumes fields live under nft$nft (name, image_url, description),
# matching the parsing used elsewhere in this guide
div(
h3(nft$nft$name),
img(src = nft$nft$image_url, width = "400px"),
p(nft$nft$description)
)
})
}Preloading eliminates UI lag from API calls and ensures consistent performance.
Error Recovery Strategies
Retry Logic
Implement exponential backoff for transient errors:
# Fetch one NFT, retrying transient failures with exponential backoff.
# NOTE(review): uses 1:max_retries — with max_retries = 0 this would
# iterate over c(1, 0); seq_len(max_retries) would be safer.
get_nft_with_retry <- function(id, chain, contract, max_retries = 3) {
for (attempt in 1:max_retries) {
tryCatch({
return(get_os_nft(id, chain, contract))
}, error = function(e) {
# Out of retries: surface one summary error to the caller
if (attempt == max_retries) {
stop(stringr::str_glue(
"Failed after {max_retries} attempts: {e$message}"
))
}
# Exponential backoff: 1s after attempt 1, 2s after attempt 2, ...
# (the final failed attempt stops above rather than sleeping)
delay <- 2^(attempt - 1)
message(stringr::str_glue(
"Attempt {attempt} failed. Retrying in {delay}s..."
))
Sys.sleep(delay)
})
}
}Graceful Degradation
Handle missing or incomplete data gracefully:
# Render an NFT card, substituting placeholders for missing fields.
# assumes `%||%` is in scope (base R >= 4.4, or via rlang) — TODO confirm,
# since this guide only loads jsonlite/shiny/data.table explicitly.
display_nft_safe <- function(nft_json) {
nft <- fromJSON(nft_json)
# Provide defaults for missing fields
name <- nft$nft$name %||% "Unnamed NFT"
description <- nft$nft$description %||% "No description"
image_url <- nft$nft$image_url %||% "https://via.placeholder.com/400"
div(
h3(name),
img(src = image_url, width = "400px"),
p(description)
)
}Building Production Shiny Apps
Complete NFT Gallery Example
A production-ready Shiny app with error handling, caching, and responsive design:
library(shiny)
library(data.table)
# Preload data with error handling
# Load an account's NFTs for one chain, falling back to an empty table on
# any failure so the UI always receives a data.table-shaped value.
load_portfolio <- function(account, chain) {
  tryCatch({
    raw_json <- get_os_account_nfts(account, chain)
    parsed <- fromJSON(raw_json)
    as.data.table(parsed$nfts)
  }, error = function(e) {
    message("Error loading portfolio: ", e$message)
    # Same columns the gallery renders from, just zero rows
    data.table(name = character(), image_url = character())
  })
}
# Card styles for the gallery grid (injected into the page head below).
gallery_styles <- HTML("
.nft-card {
display: inline-block;
margin: 15px;
width: 250px;
border: 1px solid #ddd;
border-radius: 8px;
padding: 10px;
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
}
.nft-card img {
width: 100%;
border-radius: 5px;
}
")
# Left-hand controls: wallet address, chain picker, load button, status.
portfolio_sidebar <- sidebarPanel(
  textInput("wallet", "Wallet Address",
    value = "0xa16DCcD55139D5eF5B5Ff776553ef080EB6258fc"
  ),
  selectInput("chain", "Blockchain",
    choices = c("ethereum", "matic", "arbitrum", "base")
  ),
  actionButton("load", "Load Portfolio", class = "btn-primary"),
  hr(),
  textOutput("status")
)
ui <- fluidPage(
  tags$head(tags$style(gallery_styles)),
  titlePanel("NFT Portfolio Gallery"),
  sidebarLayout(
    portfolio_sidebar,
    mainPanel(
      uiOutput("gallery")
    )
  )
)
# Server: loads a wallet's NFTs on demand and renders up to 50 cards.
server <- function(input, output, session) {
  # Current portfolio table; starts empty until the user clicks Load.
  portfolio <- reactiveVal(data.table())
  observeEvent(input$load, {
    output$status <- renderText("Loading...")
    portfolio_data <- load_portfolio(input$wallet, input$chain)
    portfolio(portfolio_data)
    output$status <- renderText(
      stringr::str_glue("Loaded {nrow(portfolio_data)} NFTs")
    )
  })
  output$gallery <- renderUI({
    data <- portfolio()
    if (nrow(data) == 0) {
      return(p("No NFTs to display. Click 'Load Portfolio' to start."))
    }
    # BUG FIX: seq_len() instead of 1:min(...) — 1:0 evaluates to c(1, 0)
    # for an empty table, which would index out of range. The guard above
    # makes that unlikely here, but seq_len is the safe, idiomatic form.
    cards <- lapply(seq_len(min(50, nrow(data))), function(i) {
      nft <- data[i]
      div(class = "nft-card",
        img(src = nft$image_url),
        h4(nft$name),
        p(nft$collection),
        getOpenseaBadge(
          contract = nft$contract,
          id = nft$identifier,
          chain = input$chain,
          width = "100px"
        )
      )
    })
    tagList(cards)
  })
}
shinyApp(ui, server)
Rate Limiting Best Practices
Recommended Delays
Based on OpenSea API limits:
| Operation | Recommended Delay | Notes |
|---|---|---|
| Single NFT fetch | None | One-off queries are fine |
| Batch fetching (<10) | 0.5s between requests | Conservative rate |
| Batch fetching (10-100) | 1s between requests | Safe for large batches |
| Pagination | 2s between pages | Avoid hitting limits |
Monitoring Rate Limits
Watch for HTTP 429 (Too Many Requests) errors:
# Call an API function, retrying once after a 60s pause when the error
# message indicates HTTP 429 (rate limited).
#
# Args:
#   fn: function to call.
#   ...: arguments forwarded to fn.
#   retry_on_429: if TRUE, retry once on a 429-looking error.
# Returns: fn's return value; re-raises any other error unchanged.
safe_api_call <- function(fn, ..., retry_on_429 = TRUE) {
  tryCatch({
    fn(...)
  }, error = function(e) {
    # Base-R grepl(fixed = TRUE) replaces stringr::str_detect: no extra
    # dependency, and "429" is matched literally rather than as a regex.
    if (retry_on_429 && grepl("429", conditionMessage(e), fixed = TRUE)) {
      message("Rate limit hit. Waiting 60s before retry...")
      Sys.sleep(60)
      fn(...) # Retry once; a second failure propagates normally
    } else {
      stop(e)
    }
  })
}
# Usage: wrap a single fetch so a 429 response pauses and retries once.
nft_json <- safe_api_call(
get_os_nft,
id = 2,
chain = "matic",
contract = "0x6444522C5aD44285b7856dd3336D09Fb939865F1"
)
Next Steps
- Get Started: Package overview and basic workflows
- Quickstart: Quick reference for common tasks
- Function Reference: Complete API documentation
Additional Resources
- OpenSea API Rate Limits
- OpenSea Data Models
- Blockchain Explorer APIs (for complementary data)
