[US VPS] Sharing a JMS traffic statistics dashboard

Posted on 2025-3-14 16:25:13
Last edited by 雪秋千 on 2025-3-14 18:05

Does anyone have a traffic statistics dashboard built on the justmysocks API? The one I wrote myself with GPT is too ugly.
Below is a preview of what I wrote:
(Attachment: Screenshot 2025-03-14 180331.png, dashboard preview)
OP | Posted on 2025-3-14 18:02:58
Let me post mine first.
1. First is the script that stores the data locally; it is run once every 30 seconds, since JMS refreshes its counter roughly every half minute. (A minimal scheduling sketch follows the script below.)
import requests
import json
from datetime import datetime

# Append your own service parameters to this URL (left blank here on purpose).
API_URL = "https://justmysocks6.net/members/getbwcounter.php?service="
HISTORY_FILE = "扶墙_usage_history.json"

def fetch_bw_counter():
    """Query the JMS bandwidth counter API and return the bytes used so far."""
    response = requests.get(API_URL, timeout=10)
    response.raise_for_status()
    data = response.json()
    # data = {"monthly_bw_limit_b": 1000000000000,
    #         "bw_counter_b": 651529129,
    #         "bw_reset_day_of_month": 13}
    return data["bw_counter_b"]

def load_history():
    try:
        with open(HISTORY_FILE, "r") as f:
            return json.load(f)
    except (IOError, ValueError):
        # Missing or corrupt history file: start from an empty list.
        return []

def save_history(history):
    with open(HISTORY_FILE, "w") as f:
        json.dump(history, f, indent=2)

def record_usage():
    # 1) Fetch the current usage
    current_bw = fetch_bw_counter()
    timestamp = datetime.utcnow().isoformat()  # stored as naive UTC

    # 2) Append the new record to the history file
    history = load_history()
    history.append({
        "timestamp": timestamp,
        "bw_counter_b": current_bw
    })
    save_history(history)

if __name__ == "__main__":
    record_usage()
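Since the counter only refreshes about every half minute, the collector just needs to be re-run on that interval. Here is a minimal scheduling sketch, assuming the script above is saved as record_usage.py (a hypothetical name) next to the history file; a cron job or systemd timer would work just as well for coarser intervals:

import time

from record_usage import record_usage  # hypothetical module name for the collector above

if __name__ == "__main__":
    while True:
        try:
            record_usage()
        except Exception as exc:
            # Keep polling even if the JMS API is briefly unreachable.
            print(f"record_usage failed: {exc}")
        time.sleep(30)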


2. Second is the display dashboard. The first panel shows total cumulative usage, and the second shows the usage rate over different time windows. (A short note on running it follows the code.)
import json
import pandas as pd
from datetime import timedelta
import dash
from dash import dcc, html, Input, Output
import plotly.graph_objs as go

# ----- Utility Functions -----

def load_usage_data(file_path="扶墙_usage_history.json"):
    """Load usage data from JSON and attach the UTC timezone.

    The collector stores datetime.utcnow().isoformat() strings, so the naive
    timestamps are already UTC; Plotly renders them in the visitor's local time.
    """
    with open(file_path, "r") as f:
        data = json.load(f)
    df = pd.DataFrame(data)
    # The recorded timestamps are naive UTC strings; mark them as UTC for consistent plotting.
    df['timestamp'] = pd.to_datetime(df['timestamp']).dt.tz_localize('UTC')
    df.sort_values('timestamp', inplace=True)
    return df

def convert_bytes(value_bytes):
    """
    Convert a byte value to a human-friendly string using a 1000 conversion factor.
    If the value in GB is less than 0.001, display in MB.
    If in MB is less than 0.001, display in B.
    """
    value_gb = value_bytes / 1e9
    if value_gb >= 0.001:
        return f"{value_gb:.3f} GB"
    value_mb = value_bytes / 1e6
    if value_mb >= 0.001:
        return f"{value_mb:.3f} MB"
    return f"{value_bytes} B"

def aggregate_data(df, resolution, window):
    """
    Aggregate usage data for a given resolution and time window.
    resolution: a pandas offset alias, e.g., 'T' for minute, 'H' for hour, 'D' for day, 'W' for week.
    window: timedelta object representing the lookback period.
    """
    end_time = df['timestamp'].max()
    start_time = end_time - window
    df_window = df[df['timestamp'] >= start_time].copy()
    if df_window.empty:
        return pd.DataFrame(columns=['timestamp', 'bw_counter_b'])
    df_window.set_index('timestamp', inplace=True)
    df_resampled = df_window.resample(resolution).last().dropna()
    df_resampled.reset_index(inplace=True)
    return df_resampled

def compute_usage_rates(df):
    """
    Compute the incremental usage (difference between consecutive bw_counter_b)
    and time differences. Returns the DataFrame with a new column 'usage_diff'.
    """
    df = df.copy()
    df['usage_diff'] = df['bw_counter_b'].diff()
    df['time_diff_sec'] = df['timestamp'].diff().dt.total_seconds()
    df['usage_rate'] = df['usage_diff'] / df['time_diff_sec']
    return df

# ----- Dash App Setup -----

app = dash.Dash(__name__)
server = app.server

app.layout = html.Div([
    html.H1("扶墙 Data Usage Dashboard"),
    html.Div([
        html.Button("Minutes", id="btn-minutes", n_clicks=0),
        html.Button("Hourly", id="btn-hourly", n_clicks=0),
        html.Button("Daily", id="btn-daily", n_clicks=0),
        html.Button("Weekly", id="btn-weekly", n_clicks=0)
    ], style={'marginBottom': '20px'}),
    html.Div(id="summary-stats", style={'marginBottom': '20px'}),
    dcc.Graph(id="usage-graph"),
    dcc.Graph(id="rate-graph"),
    dcc.Interval(id="interval-update", interval=60*1000, n_intervals=0)  # update every minute
])

# ----- Callback to Update Graphs and Stats -----

@app.callback(
    [Output("usage-graph", "figure"),
     Output("rate-graph", "figure"),
     Output("summary-stats", "children")],
    [Input("btn-minutes", "n_clicks"),
     Input("btn-hourly", "n_clicks"),
     Input("btn-daily", "n_clicks"),
     Input("btn-weekly", "n_clicks"),
     Input("interval-update", "n_intervals")]
)
def update_dashboard(n_min, n_hour, n_day, n_week, n_interval):
    df = load_usage_data()

    # Determine which button was most recently pressed
    ctx = dash.callback_context
    if not ctx.triggered:
        resolution_choice = 'H'
        window = timedelta(hours=24)
    else:
        button_id = ctx.triggered[0]['prop_id'].split('.')[0]
        if button_id == "btn-minutes":
            resolution_choice = 'T'  # minute resolution
            window = timedelta(hours=1)
        elif button_id == "btn-hourly":
            resolution_choice = 'H'
            window = timedelta(hours=24)
        elif button_id == "btn-daily":
            resolution_choice = 'D'
            window = timedelta(days=7)
        elif button_id == "btn-weekly":
            resolution_choice = 'W'
            window = timedelta(weeks=4)
        else:
            resolution_choice = 'H'
            window = timedelta(hours=24)

    df_agg = aggregate_data(df, resolution_choice, window)
    df_rate = compute_usage_rates(df_agg)

    # ----- Cumulative Usage Figure -----
    cum_fig = go.Figure()
    cum_fig.add_trace(go.Scatter(
        x=df_agg['timestamp'],
        y=df_agg['bw_counter_b'] / 1e9,  # cumulative usage in GB
        mode='lines+markers',
        name="Cumulative Usage (GB)",
        connectgaps=False
    ))
    cum_fig.update_layout(
        title="扶墙 Cumulative Usage Over Time",
        xaxis_title="Time",
        yaxis_title="Usage (GB)",
        hovermode="x unified"
    )

    # ----- Usage Rate Figure -----
    df_rate_clean = df_rate.dropna(subset=['usage_diff'])
    if not df_rate_clean.empty:
        max_diff = df_rate_clean['usage_diff'].max()
        if max_diff / 1e9 >= 0.001:
            factor = 1e9
            y_label = "Usage per Interval (GB)"
        elif max_diff / 1e6 >= 0.001:
            factor = 1e6
            y_label = "Usage per Interval (MB)"
        else:
            factor = 1
            y_label = "Usage per Interval (B)"
        usage_diff_converted = df_rate_clean['usage_diff'] / factor
    else:
        usage_diff_converted = []
        y_label = "Usage per Interval"

    rate_fig = go.Figure()
    rate_fig.add_trace(go.Scatter(
        x=df_rate_clean['timestamp'],
        y=usage_diff_converted,
        mode='lines+markers',
        name="Interval Usage",
        connectgaps=False
    ))
    rate_fig.update_layout(
        title="扶墙 Usage Rate Over Time",
        xaxis_title="Time",
        yaxis_title=y_label,
        hovermode="x unified"
    )

    # ----- Summary Statistics -----
    if not df_rate['usage_rate'].dropna().empty:
        avg_rate = df_rate['usage_rate'].dropna().mean()  # bytes per second
        avg_per_min = convert_bytes(avg_rate * 60)
        avg_per_hour = convert_bytes(avg_rate * 3600)
        avg_per_day = convert_bytes(avg_rate * 3600 * 24)
        avg_per_week = convert_bytes(avg_rate * 3600 * 24 * 7)
    else:
        avg_per_min = avg_per_hour = avg_per_day = avg_per_week = "N/A"

    summary = html.Div([
        html.P(f"Average Usage per Minute: {avg_per_min}"),
        html.P(f"Average Usage per Hour: {avg_per_hour}"),
        html.P(f"Average Usage per Day: {avg_per_day}"),
        html.P(f"Average Usage per Week: {avg_per_week}")
    ])

    return cum_fig, rate_fig, summary

if __name__ == '__main__':
    app.run_server(debug=True)
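To try it: install the dependencies the two scripts import (requests, pandas, dash, plotly), keep the collector running so 扶墙_usage_history.json accumulates samples, then start the dashboard script with Python. By default Dash serves the app at http://127.0.0.1:8050, and the dcc.Interval component refreshes both graphs and the summary stats every minute, so new samples show up without reloading the page.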