curl --request GET \
  --url https://jmpy.me/api/v1/analytics/timeline \
  --header 'Authorization: Bearer <token>'

{
  "data": [
    {
      "date": "<string>",
      "clicks": 123,
      "unique_visitors": 123
    }
  ]
}

Get time-series click data with configurable granularity
curl --request GET \
  --url https://jmpy.me/api/v1/analytics/timeline \
  --header 'Authorization: Bearer <token>'

{
  "data": [
    {
      "date": "<string>",
      "clicks": 123,
      "unique_visitors": 123
    }
  ]
}

Supported granularity values: hour or day.

# Get daily clicks for last 30 days
# Replace YOUR_API_KEY with a real bearer token before running.
curl -X GET "https://jmpy.me/api/v1/analytics/timeline?days=30&granularity=day" \
-H "Authorization: Bearer YOUR_API_KEY"
# Get hourly clicks for last 7 days
curl -X GET "https://jmpy.me/api/v1/analytics/timeline?days=7&granularity=hour" \
-H "Authorization: Bearer YOUR_API_KEY"
# Get daily clicks for last year
curl -X GET "https://jmpy.me/api/v1/analytics/timeline?days=365&granularity=day" \
-H "Authorization: Bearer YOUR_API_KEY"
const fetch = require('node-fetch');

// Fetch 30 days of daily click data and print totals plus the peak day.
// `require` means this is a CommonJS module, where top-level `await` is a
// syntax error — so the async work is wrapped in an async IIFE.
(async () => {
  const response = await fetch(
    'https://jmpy.me/api/v1/analytics/timeline?days=30&granularity=day',
    {
      headers: { 'Authorization': 'Bearer YOUR_API_KEY' }
    }
  );
  const data = await response.json();

  // Calculate totals across all returned data points
  const totalClicks = data.data.reduce((sum, d) => sum + d.clicks, 0);
  const totalVisitors = data.data.reduce((sum, d) => sum + d.unique_visitors, 0);
  console.log(`Last 30 Days: ${totalClicks} clicks, ${totalVisitors} unique visitors`);

  // Find peak day (data point with the most clicks)
  const peakDay = data.data.reduce((max, d) => d.clicks > max.clicks ? d : max);
  console.log(`Peak Day: ${peakDay.date} with ${peakDay.clicks} clicks`);
})();
import axios from 'axios';

/** One bucket of the analytics timeline. */
interface TimelinePoint {
  date: string;
  clicks: number;
  unique_visitors: number;
}

/** Envelope returned by the timeline endpoint. */
type TimelineResponse = { success: boolean; data: TimelinePoint[] };

// Request 30 daily buckets of click data.
const response = await axios.get<TimelineResponse>(
  'https://jmpy.me/api/v1/analytics/timeline',
  {
    headers: { 'Authorization': 'Bearer YOUR_API_KEY' },
    params: { days: 30, granularity: 'day' }
  }
);

// Reshape the payload for a chart library (e.g., Chart.js, Recharts).
const timeline = response.data.data;
const chartData = timeline.map((p) => ({
  date: new Date(p.date).toLocaleDateString(),
  clicks: p.clicks,
  visitors: p.unique_visitors
}));
console.log('Chart data ready:', chartData.length, 'points');
import requests
import matplotlib.pyplot as plt
from datetime import datetime

# Pull 30 days of daily click counts from the analytics timeline endpoint.
response = requests.get(
    'https://jmpy.me/api/v1/analytics/timeline',
    headers={'Authorization': 'Bearer YOUR_API_KEY'},
    params={'days': 30, 'granularity': 'day'}
)
points = response.json()['data']

# Turn ISO-8601 'Z' timestamps into aware datetimes and split out each series.
dates = []
clicks = []
visitors = []
for entry in points:
    dates.append(datetime.fromisoformat(entry['date'].replace('Z', '+00:00')))
    clicks.append(entry['clicks'])
    visitors.append(entry['unique_visitors'])

# Plot both series on one labelled chart and write it to disk.
plt.figure(figsize=(12, 6))
plt.plot(dates, clicks, label='Clicks', color='blue')
plt.plot(dates, visitors, label='Unique Visitors', color='green')
plt.xlabel('Date')
plt.ylabel('Count')
plt.title('Click Timeline - Last 30 Days')
plt.legend()
plt.xticks(rotation=45)
plt.tight_layout()
plt.savefig('timeline.png')
print("Chart saved to timeline.png")
<?php
// Fetch 30 days of daily click data and print a short text summary.
$client = new GuzzleHttp\Client();
$response = $client->request('GET', 'https://jmpy.me/api/v1/analytics/timeline', [
'headers' => [
'Authorization' => 'Bearer YOUR_API_KEY'
],
'query' => [
'days' => 30,
'granularity' => 'day'
]
]);
$timeline = json_decode($response->getBody(), true)['data'];
// Calculate stats; guard the average against an empty timeline,
// which would otherwise cause a division-by-zero error.
$totalClicks = array_sum(array_column($timeline, 'clicks'));
$avgClicks = count($timeline) > 0 ? $totalClicks / count($timeline) : 0;
echo "Timeline Summary (Last 30 Days)\n";
echo "================================\n";
echo "Total Clicks: $totalClicks\n";
echo "Average Daily Clicks: " . round($avgClicks, 1) . "\n";
echo "Data Points: " . count($timeline) . "\n";
?>
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"os"
)

// main fetches 30 days of daily click data from the analytics timeline
// endpoint and prints the raw JSON response. Errors are reported to
// stderr instead of being silently discarded: in particular, ignoring
// the error from Do() would make the deferred resp.Body.Close() panic
// on a nil response.
func main() {
	baseURL := "https://jmpy.me/api/v1/analytics/timeline"
	params := url.Values{}
	params.Add("days", "30")
	params.Add("granularity", "day")

	req, err := http.NewRequest("GET", baseURL+"?"+params.Encode(), nil)
	if err != nil {
		fmt.Fprintln(os.Stderr, "building request:", err)
		os.Exit(1)
	}
	req.Header.Add("Authorization", "Bearer YOUR_API_KEY")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Fprintln(os.Stderr, "request failed:", err)
		os.Exit(1)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Fprintln(os.Stderr, "reading response:", err)
		os.Exit(1)
	}
	fmt.Println(string(body))
}
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.URI;
import java.net.http.HttpResponse;
// Request 30 days of daily-granularity timeline data via the
// java.net.http client (Java 11+).
// NOTE(review): snippet style (no enclosing class/method) — assumes a
// JShell-like context; HttpClient.send also throws checked
// IOException/InterruptedException that a real program must handle.
String url = "https://jmpy.me/api/v1/analytics/timeline?days=30&granularity=day";
HttpClient client = HttpClient.newHttpClient();
// Attach the bearer token via the Authorization header.
HttpRequest request = HttpRequest.newBuilder()
.uri(URI.create(url))
.header("Authorization", "Bearer YOUR_API_KEY")
.GET()
.build();
// Send synchronously and print the raw JSON body.
HttpResponse<String> response = client.send(request,
HttpResponse.BodyHandlers.ofString());
System.out.println(response.body());
{
"success": true,
"data": [
{
"date": "2024-01-01T00:00:00.000Z",
"clicks": 234,
"unique_visitors": 198
},
{
"date": "2024-01-02T00:00:00.000Z",
"clicks": 289,
"unique_visitors": 245
},
{
"date": "2024-01-03T00:00:00.000Z",
"clicks": 312,
"unique_visitors": 267
},
{
"date": "2024-01-04T00:00:00.000Z",
"clicks": 198,
"unique_visitors": 176
},
{
"date": "2024-01-05T00:00:00.000Z",
"clicks": 156,
"unique_visitors": 134
}
]
}
{
"success": true,
"data": [
{
"date": "2024-01-07T00:00:00.000Z",
"clicks": 12,
"unique_visitors": 10
},
{
"date": "2024-01-07T01:00:00.000Z",
"clicks": 8,
"unique_visitors": 7
},
{
"date": "2024-01-07T02:00:00.000Z",
"clicks": 5,
"unique_visitors": 5
},
{
"date": "2024-01-07T09:00:00.000Z",
"clicks": 45,
"unique_visitors": 38
},
{
"date": "2024-01-07T10:00:00.000Z",
"clicks": 67,
"unique_visitors": 54
}
]
}
{
"success": false,
"error": {
"code": "UNAUTHORIZED",
"message": "User authentication required"
}
}
Build a clicks chart
// Using Chart.js
/**
 * Fetch 30 days of daily timeline data and render a two-series line
 * chart (clicks + unique visitors) onto the canvas with the given id.
 */
async function renderClicksChart(canvasId) {
  const res = await fetch(
    'https://jmpy.me/api/v1/analytics/timeline?days=30&granularity=day',
    { headers: { 'Authorization': 'Bearer YOUR_API_KEY' } }
  );
  const { data } = await res.json();

  // Build the labels and both datasets up front, then hand them to Chart.js.
  const labels = data.map((p) => new Date(p.date).toLocaleDateString());
  const clicksSeries = {
    label: 'Clicks',
    data: data.map((p) => p.clicks),
    borderColor: '#3b82f6',
    tension: 0.4
  };
  const visitorsSeries = {
    label: 'Unique Visitors',
    data: data.map((p) => p.unique_visitors),
    borderColor: '#10b981',
    tension: 0.4
  };

  const ctx = document.getElementById(canvasId).getContext('2d');
  new Chart(ctx, {
    type: 'line',
    data: { labels, datasets: [clicksSeries, visitorsSeries] },
    options: {
      responsive: true,
      plugins: {
        title: { display: true, text: 'Clicks Over Time' }
      }
    }
  });
}
Calculate growth rate
import requests
from datetime import datetime, timedelta


def _week_over_week(timeline):
    """Return (this_week_clicks, last_week_clicks, growth_pct) from 14 daily buckets.

    Pure computation split out from the network call so it can be unit-tested.
    Assumes the API returns points in chronological order (oldest first) and
    that ``timeline`` holds 14 daily buckets — TODO confirm ordering against
    the API contract.
    """
    this_week = timeline[7:]   # most recent 7 days
    last_week = timeline[:7]   # the 7 days before that
    this_week_clicks = sum(d['clicks'] for d in this_week)
    last_week_clicks = sum(d['clicks'] for d in last_week)
    if last_week_clicks > 0:
        growth = ((this_week_clicks - last_week_clicks) / last_week_clicks) * 100
    else:
        # No baseline: report +100% if there was any traffic at all, else flat.
        growth = 100 if this_week_clicks > 0 else 0
    return this_week_clicks, last_week_clicks, growth


def calculate_growth():
    """Fetch the last 14 daily buckets and print week-over-week click growth.

    Returns the growth percentage (float or int when the baseline is zero).
    """
    response = requests.get(
        'https://jmpy.me/api/v1/analytics/timeline',
        headers={'Authorization': 'Bearer YOUR_API_KEY'},
        params={'days': 14, 'granularity': 'day'}
    )
    timeline = response.json()['data']
    this_week_clicks, last_week_clicks, growth = _week_over_week(timeline)
    print(f"This Week: {this_week_clicks} clicks")
    print(f"Last Week: {last_week_clicks} clicks")
    print(f"Growth: {growth:+.1f}%")
    return growth
Identify peak hours
/** Average click volume for one hour-of-day bucket. */
interface HourlyPattern {
  hour: number;
  avgClicks: number;
  label: string;
}

/**
 * Pull 7 days of hourly data, bucket the points by hour of day, and
 * return the buckets sorted by average clicks (busiest first).
 * Also logs the top five hours.
 */
async function findPeakHours(): Promise<HourlyPattern[]> {
  const res = await fetch(
    'https://jmpy.me/api/v1/analytics/timeline?days=7&granularity=hour',
    { headers: { 'Authorization': 'Bearer YOUR_API_KEY' } }
  );
  const { data } = await res.json();

  // Collect the click counts observed in each hour of the day.
  const byHour: Record<number, number[]> = {};
  for (const point of data) {
    const h = new Date(point.date).getHours();
    if (!byHour[h]) byHour[h] = [];
    byHour[h].push(point.clicks);
  }

  // Average each bucket and rank from busiest to quietest.
  const patterns = Object.entries(byHour)
    .map(([hour, clicks]) => ({
      hour: parseInt(hour),
      avgClicks: clicks.reduce((a, b) => a + b, 0) / clicks.length,
      label: `${hour.toString().padStart(2, '0')}:00`
    }))
    .sort((a, b) => b.avgClicks - a.avgClicks);

  console.log('Peak Hours (by avg clicks):');
  patterns.slice(0, 5).forEach((p, i) => {
    console.log(` ${i + 1}. ${p.label}: ${p.avgClicks.toFixed(1)} avg clicks`);
  });
  return patterns;
}
Detect anomalies
/**
 * Fetch 30 days of daily click data and flag days whose click count
 * deviates from the mean by more than `threshold` standard deviations.
 *
 * @param {number} threshold z-score cutoff (default 2)
 * @returns {Promise<Array>} the anomalous data points
 */
async function detectAnomalies(threshold = 2) {
  const response = await fetch(
    'https://jmpy.me/api/v1/analytics/timeline?days=30&granularity=day',
    { headers: { 'Authorization': 'Bearer YOUR_API_KEY' } }
  );
  const { data } = await response.json();

  // Mean and (population) standard deviation of daily clicks.
  const clicks = data.map(d => d.clicks);
  const mean = clicks.reduce((a, b) => a + b, 0) / clicks.length;
  const variance = clicks.reduce((sum, c) => sum + Math.pow(c - mean, 2), 0) / clicks.length;
  const stdDev = Math.sqrt(variance);

  console.log(`Mean: ${mean.toFixed(1)} clicks/day`);
  console.log(`Std Dev: ${stdDev.toFixed(1)}`);
  console.log(`\nAnomalies (${threshold}σ threshold):`);

  // A flat series (stdDev === 0) has no anomalies; returning early keeps
  // the z-score division below from producing NaN for every point.
  if (stdDev === 0) {
    return [];
  }

  // Keep points whose |z-score| exceeds the threshold.
  const anomalies = data.filter(d => {
    const zScore = Math.abs((d.clicks - mean) / stdDev);
    return zScore > threshold;
  });

  anomalies.forEach(a => {
    const zScore = (a.clicks - mean) / stdDev;
    const type = zScore > 0 ? '📈 Spike' : '📉 Drop';
    console.log(` ${type}: ${a.date} - ${a.clicks} clicks (${zScore.toFixed(1)}σ)`);
  });
  return anomalies;
}