library(artpipelines)
library(lubridate)
canvas <- "path/to/artwork.procreate"
# Extract creation window with detailed output
result <- verify_creation_period(canvas, verbose = TRUE)
Overview
This guide covers advanced artpipelines workflows including:
- Provenance verification with defensible creation windows
- Frame-by-frame color analysis
- Custom extraction workflows
- Batch processing patterns
- Error recovery strategies
Provenance Verification
The verify_creation_period() function computes a forensically defensible creation window from Procreate canvas metadata.
Computing a Defensible Window
Understanding the Results
# Start time (UTC) - from MP4 mvhd atoms
result$start_utc
# [1] "2024-03-10 09:15:23 UTC"
# Stop time (UTC) - from filtered tile writes
result$stop_utc
# [1] "2024-03-15 14:30:45 UTC"
# As a lubridate interval (for calculations)
result$interval_utc
# [1] 2024-03-10 09:15:23 UTC--2024-03-15 14:30:45 UTC
# Duration in hours
time_length(result$interval_utc, "hours")
# [1] 125.25
# Timezone offset inferred from file metadata
result$tz_offset_hours
# [1] -7 # Pacific Daylight Time
# Confidence levels
result$start_confidence # "high" - from QuickTime MP4 header
result$stop_confidence # "moderate" - from tile write filtering
How It Works
The function uses multiple signals:
- Start time: Anchored on the earliest non-empty timelapse MP4 segment's mvhd creation time (QuickTime epoch, UTC). This is high confidence because it comes from the video recording start.
- Stop time: Derived from the latest tile file modification time, after filtering out "resave windows" (bulk saves that happen when re-opening the file). Local ZIP times are translated to UTC using an inferred timezone offset.
- Timezone inference: Compares local file times against UTC metadata to compute the artist's likely timezone offset.
Using for Authenticity Claims
# Compose a human-readable provenance statement from the creation window
window <- verify_creation_period(canvas)
# Elapsed working window in hours, via the lubridate interval
elapsed_hours <- time_length(window$interval_utc, "hours")
sprintf(
  "This artwork was created between %s and %s UTC (approximately %.1f hours)",
  format(window$start_utc, "%Y-%m-%d %H:%M"),
  format(window$stop_utc, "%Y-%m-%d %H:%M"),
  elapsed_hours
)
# [1] "This artwork was created between 2024-03-10 09:15 and 2024-03-15 14:30 UTC (approximately 125.3 hours)"
Frame-by-Frame Color Analysis
Extract detailed color information from timelapse frames.
Building the Color Dataset
# Example identifiers used throughout this guide (artist and artwork UUIDs)
artist <- "746b8207-72f5-4ab6-8d19-a91d03daec3d"
artwork <- "99a61148-1d3b-4340-8cf6-92ad26046b0f"
# One shared timestamp passed to each step — presumably so all generated
# records carry the same processing time; confirm against run_pipeline
new_utc <- lubridate::now("UTC")
# First, extract timelapse frames to CDN
extract_timelapse(artist, artwork, "path/to/timelapse.mp4", new_utc)
# Then build color dataset from frames
colors <- create_color_dataset(artist, artwork, new_utc)
# Structure of color data (one row per color per frame, data.table print style)
head(colors)
#> frame_num hex_color count percentage luminance
#> 1: 1 #2A3B4C 1250 12.5 0.28
#> 2: 1 #F5E6D7 980 9.8 0.91
#> 3: 1 #8C7B6A 750 7.5 0.52
Creating Color Visualizations
# Generate color progression plots
create_color_plots(artist, artwork, colors)
# Uploads plots to CDN: processed/{artist}/{artwork}/mod-frames/graphs/
Frame Analytics
Compute detailed frame-by-frame metrics:
# Frame analytics require artwork-level stats for context.
# All arguments are named for clarity and consistency with the other
# extract_stats_procreate call in this guide (avoids a positional argument
# trailing a named one).
artwork_stats <- extract_stats_procreate(
  artist = artist,
  artwork = artwork,
  fp = NULL,
  new_utc = new_utc
)
analytics <- create_frame_analytics(
  artist = artist,
  artwork = artwork,
  artwork_colors = colors,
  artwork_stats = artwork_stats
)
head(analytics)
#> frame_num n_colors dominant_color color_entropy complexity_delta
#> 1: 1 45 #2A3B4C 2.34 0.00
#> 2: 2 52 #2A3B4C 2.45 0.11
#> 3: 3 68 #8C7B6A 2.67 0.22
Derived Frame Metrics
Calculate summary metrics from frame analytics:
metrics <- calc_frame_metrics(
frame_analytics = analytics,
n_unique_colors = artwork_stats$n_unique_colors,
brush_strokes = artwork_stats$brush_strokes,
drawing_hours = artwork_stats$drawing_hours
)
metrics$strokes_per_unique_color # Efficiency metric
metrics$color_generation_rate # Colors added per hour
metrics$frame_color_variance # Consistency metric
metrics$frame_color_stability # Workflow stability
metrics$technique_phase_count # Number of technique shifts
metrics$early_late_color_ratio # Color evolution pattern
Custom Extraction Workflows
Extract Only What You Need
For partial processing, use individual extraction functions:
# Just extract signature
extract_signature(artist, artwork, fp = "path/to/canvas.procreate")
# Uploads to: processed/{artist}/{artwork}/signature.png
# Just extract video segments
extract_video_segments(artist, artwork, "path/to/canvas.procreate")
# Uploads to: processed/{artist}/{artwork}/video-segments/
# Just create gallery images (resized versions)
create_gallery_images(artist, artwork)
# Requires main.png already uploaded
# Creates multiple sizes in: processed/{artist}/{artwork}/mod-gallery/
Creation Window Extraction
For provenance without full pipeline:
# Extract just the creation window metadata
window_data <- extract_creation_window("path/to/canvas.procreate")
window_data$start_utc
window_data$stop_utc
window_data$duration_hours
window_data$confidence
AI Content Generation
Generate AI-powered descriptions and style classifications.
Style Classification
styles <- create_styles_tables(
artist = artist,
artwork = artwork,
img_path = "path/to/main.png",
art_title = "Portrait of Jeezy",
artist_name = "Bobby Fatemi",
drawing_hours = 12.5,
brush_strokes = 45000,
new_utc = new_utc
)
# Artwork-specific styles
styles$artwork_styles
#> tag tag_norm desc
#> 1: Digital Art digital_art Created using digital painting tools
#> 2: Portraiture portraiture Focus on capturing human likeness
#> 3: Urban Realism urban_realism Contemporary street-influenced style
# Global style tags (for platform-wide categorization)
styles$global_styles
#> style_tag count
#> 1: digital_art 156
#> 2: portraiture 89
Artwork Profiles
profiles <- create_artwork_profiles(
artist_name = "Bobby Fatemi",
drawing_hours = 12.5,
art_title = "Portrait of Jeezy",
img_path = "path/to/main.png",
artwork = artwork,
artist = artist,
art_story = "Hip-hop culture celebration",
new_utc = new_utc
)
profiles$category # "Portrait"
profiles$style # "Digital Realism"
profiles$methodology # "Layered digital painting with..."
profiles$color # "Earth tones with vibrant accents..."
profiles$details # Extended description
OpenSea Integration
Link artworks to NFT marketplace listings.
# Create OpenSea metadata record
opensea_record <- create_artwork_opensea(
artist = artist,
artwork = artwork,
nft_url = "https://opensea.io/assets/matic/0x644.../123",
new_utc = new_utc
)
# Fetch and store artist's OpenSea profile
artist_opensea <- create_artist_opensea(artist, new_utc)
Error Recovery
Handling Pipeline Failures
# Check for failed jobs.
# NOTE: on.exit() only takes effect inside a function body; at the top level
# of a script it is silently ignored, so the original on.exit(artcore::dbd(cn))
# would never run and the connection would leak. Disconnect explicitly instead.
cn <- artcore::dbc()
failed_jobs <- DBI::dbGetQuery(
  cn,
  "SELECT * FROM pipeline.jobs WHERE status = 'Error';"
) |>
  data.table::as.data.table()
artcore::dbd(cn)
# For each failed job, check the error message
failed_jobs$msg
# [1] "Failed in extract_stats_procreate"
# Resume from the failed step manually
# (First, fix the underlying issue)
# Re-run specific extraction
extract_stats_procreate(
  artist = failed_jobs$artist_uuid[1],
  artwork = failed_jobs$art_uuid[1],
  fp = NULL,
  new_utc = lubridate::now("UTC")
)
# Update job status manually if needed
job_update(failed_jobs$job_id[1], status = "Success", msg = "Manually recovered")
Performance Considerations
Timelapse Frame Extraction
Frame extraction is the most resource-intensive step:
# For very long timelapses (>1 hour), consider:
# 1. Running on a machine with sufficient disk space
# 2. Using background processing
# The extract_timelapse function:
# - Extracts one frame per second of video
# - Uploads JPEG frames to CDN
# - Can generate thousands of files
Batch Processing Pattern
For processing multiple artworks:
# Work queue: one entry per artwork, each carrying the arguments run_pipeline
# expects (the list(...) placeholders stand in for real file paths)
artworks_to_process <- list(
  list(artist = "uuid1", artwork = "uuid1", files = list(...)),
  list(artist = "uuid2", artwork = "uuid2", files = list(...))
)
# Run every artwork through the pipeline, capturing per-artwork errors
# instead of aborting the whole batch
results <- lapply(artworks_to_process, function(entry) {
  tryCatch(
    do.call(run_pipeline, entry),
    error = function(err) {
      list(status = "error", message = err$message, artwork = entry$artwork)
    }
  )
})
# Identify which batch results are error records (a type-stable logical mask,
# then subset — equivalent to the Filter() form)
is_error <- vapply(
  results,
  function(res) !is.null(res$status) && res$status == "error",
  logical(1)
)
failures <- results[is_error]
length(failures)
# [1] 0
Next Steps
- Function Reference - Complete API documentation with all parameters
- Get Started - Return to basics
