Compare commits
3 Commits
| Author | SHA1 | Date |
|---|---|---|
| | c384ecffe8 | |
| | 1140685e19 | |
| | 269fad41a9 | |
app.py (new executable file, 133 lines)

@@ -0,0 +1,133 @@
#!/usr/bin/env python
#
# -*- coding: utf-8 -*-
#
import flask
import logging
import sys
import os

app = flask.Flask(__name__)

def hit(x):
    pass
try:
    import stats
    hit = stats.engine.hit
except ImportError:
    stats = None

# this is the root path for the news.xml server, must end in /
# i.e. http://news.psi.i2p/news/
# defaults to /
ROOT='/'
port=9696
if len(sys.argv) > 1:
    ROOT=sys.argv[1]
if len(sys.argv) > 2:
    port = int(sys.argv[2])

def has_lang(lang):
    """
    :return True if we have news for a language:
    """
    logging.info("Checking for language: " + lang)
    if '.' in lang or '/' in lang:
        return False
    return os.path.exists(os.path.join(app.static_folder, 'news_{}.su3'.format(lang)))

def serve_platform_feed(osname, branch):
    logging.info("Serving: "+ osname + " Branch: " + branch)
    lang = flask.request.args.get('lang', 'en')
    lang = lang.split('_')[0]
    hit(lang)
    fname = os.path.join(osname, branch, 'news.su3')
    if has_lang(lang):
        fname = os.path.join(osname, branch, 'news_{}.su3'.format(lang))
    return serveFile(os.path.join(app.static_folder, fname))

def serveFile(path):
    logging.info("Serving file: "+ path)
    return flask.send_file(path)

@app.route('/')
def index():
    """
    serve news stats page
    """
    logging.info("Serving news stats page")
    return flask.render_template('index.html',root=ROOT)

@app.route('/news.su3')
def news_su3():
    """
    serve news.su3
    """
    logging.info("Serving standard newsfeed")
    return serve_platform_feed("", "")

@app.route('/mac-arm64/stable/news.su3')
def news_mac_arm_stable_su3():
    """
    serve mac-arm64/stable/news.su3
    """
    return serve_platform_feed("mac-arm64", "stable")

@app.route('/mac/stable/news.su3')
def news_mac_stable_su3():
    """
    serve mac/stable/news.su3
    """
    return serve_platform_feed("mac", "stable")

@app.route('/win/beta/news.su3')
def news_win_beta_su3():
    """
    serve win/beta/news.su3
    """
    return serve_platform_feed("win", "beta")

@app.route('/netsize.svg')
def netsize_svg():
    """
    generate and serve network size svg
    """
    if stats:
        args = flask.request.args
        try:
            window = int(args['window'])
            tslice = int(args['tslice'])
            mult = int(args['mult'])
            resp = flask.Response(stats.engine.netsize(tslice, window, mult))
            resp.mimetype = 'image/svg+xml'
            return resp
        except Exception as e:
            print (e)
            flask.abort(503)
    # we don't have stats to show, stats module not imported
    flask.abort(404)


@app.route('/requests.svg')
def requests_svg():
    """
    generate and serve requests per interval graph
    """
    args = flask.request.args
    if stats:
        try:
            window = int(args['window'])
            tslice = int(args['tslice'])
            mult = int(args['mult'])
            resp = flask.Response(stats.engine.requests(tslice, window, mult))
            resp.mimetype = 'image/svg+xml'
            return resp
        except Exception as e:
            print (e)
            flask.abort(503)
    flask.abort(404)

if __name__ == '__main__':
    # run it
    logging.basicConfig(level=logging.INFO)
    app.run('127.0.0.1', port)
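A quick aside, not part of the diff: app.py takes an optional root path and port from the command line (defaulting to `/` and 9696) and binds the Flask app to 127.0.0.1 only. A minimal smoke-test sketch, assuming the server was started with `python app.py / 9696` and that a `news.su3` file exists under the Flask static folder, might look like this:

```python
# Hypothetical smoke test for the app.py shown above; assumes the server is
# already running locally on port 9696 and static/news.su3 exists.
import urllib.request

resp = urllib.request.urlopen('http://127.0.0.1:9696/news.su3?lang=de_DE')
print(resp.status, resp.headers.get('Content-Type'))
# lang is split on '_', so de_DE is looked up as news_de.su3 and the request
# falls back to the untranslated news.su3 when that translation is absent.
```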
@@ -1,2 +1,3 @@
 export venv_dir="env"
-export venv="`which virtualenv-2.7 || which virtualenv`"
+export venv="`which virtualenv`"
+#virtualenv-2.7 || which virtualenv`"
@@ -69,7 +69,7 @@ class Release(object):
        if self.__release_min_java_version is not None:
            release.attrib['minJavaVersion'] = self.__release_min_java_version

-        for update_type, update in self.__release_updates.items():
+        for update_type, update in list(self.__release_updates.items()):
            update_node = etree.SubElement(release, '{%s}update' % I2P_NS)
            update_node.attrib['type'] = update_type

@@ -159,7 +159,7 @@ class Revocations(object):

        revocations = etree.Element('{%s}revocations' % I2P_NS)

-        for crl_id, crl in self.__revocations_crls.items():
+        for crl_id, crl in list(self.__revocations_crls.items()):
            crl_node = etree.SubElement(revocations, '{%s}crl' % I2P_NS)
            crl_node.attrib['id'] = crl_id
            crl_node.attrib['updated'] = crl.updated().isoformat()
@@ -35,7 +35,7 @@ def load_entries(fg, entries_file, platform_entries_file=None):
    metadatas = {}
    finalentries = {}

-    print('Loading entries from %s' % entries_file)
+    print(('Loading entries from %s' % entries_file))
    entries = prepare_entries_file(fg, entries_file)

    # split() creates a junk final element with trailing </div>
@@ -46,7 +46,7 @@ def load_entries(fg, entries_file, platform_entries_file=None):
        finalentries[md['id']] = entry_parts[1]

    if os.path.exists(platform_entries_file) and platform_entries_file != entries_file and platform_entries_file is not None and platform_entries_file != "data/entries.html":
-        print('Loading platform entries from %s' % platform_entries_file)
+        print(('Loading platform entries from %s' % platform_entries_file))
        entries = prepare_entries_file(fg, platform_entries_file)
        for entry_str in entries[:-1]:
            entry_parts = entry_str.split('</details>', 1)
@@ -56,7 +56,7 @@ def load_entries(fg, entries_file, platform_entries_file=None):

    sorted_metadata = collections.OrderedDict(sorted(metadatas.items()))

-    for metadata in sorted_metadata.values():
+    for metadata in list(sorted_metadata.values()):
        fe = fg.add_entry()
        fe.id(metadata['id'])
        fe.title(metadata['title'])
@@ -73,7 +73,7 @@ def prepare_entries_file(fg, entries_file=None):
    with open(entries_file) as f:
        entries_data = f.read().strip('\n')
        # Replace HTML non-breaking space with unicode
-        entries_data = entries_data.replace('&nbsp;', '\u00a0')
+        entries_data = entries_data.replace('&nbsp;', '\\u00a0')
        # Strip the leading <div> from translations
        if entries_data.startswith('<div>'):
            entries_data = entries_data[5:]
@@ -103,7 +103,7 @@ def load_releases(fg):
        if 'minJavaVersion' in release:
            r.min_java_version(release['minJavaVersion'])

-        for update_type, update in release['updates'].items():
+        for update_type, update in list(release['updates'].items()):
            u = r.add_update(update_type)
            if 'clearnet' in update:
                for url in update['clearnet']:
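A quick aside, not part of the diff: the hunks above look like the standard 2to3-style conversions. In Python 3, dict.items() and dict.values() return live view objects, so wrapping them in list(...) takes a snapshot that stays valid if the dict is modified during iteration, and the doubled parentheses in print((...)) are the mechanical result of turning print statements into function calls. A minimal illustration of why the list(...) wrapper matters (illustrative values, not from the commit):

```python
# Why list(...) around dict.items() matters in Python 3 (illustrative values).
updates = {'su3': 'http://example.i2p/news.su3'}
for update_type, url in list(updates.items()):
    # Iterating the plain updates.items() view while inserting keys below would
    # raise "RuntimeError: dictionary changed size during iteration".
    updates[update_type + '-mirror'] = url
print(sorted(updates))
```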
stats.py (new executable file, 259 lines)

@@ -0,0 +1,259 @@
#
# -*- coding: utf-8 -*-
#
import datetime
import json
import logging
import operator
import os
import time

# try importing redis
try:
    import redis
except ImportError:
    print("redis not available, fall back to volatile stats backend")
    redis = None

# try importing pygal
try:
    import pygal
except ImportError:
    print("pygal not available, fall back to text based stats")
    pygal = None

__doc__ = """
statistics backend optionally using redis
"""


class RedisDB:
    """
    redis based backend for storing stats
    """
    def __init__(self):
        self._redis = redis.Redis()
        self.exists = self._redis.exists
        self.get = self._redis.get
        self.set = self._redis.set

class DictDB:
    """
    volatile dictionary based database backend for storing stats in memory
    """
    def __init__(self):
        self._d = dict()

    def get(self, k):
        if self.exists(k):
            return self._d[k]

    def set(self, k, v):
        self._d[k] = v

    def exists(self, k):
        return k in self._d


class Grapher:
    """
    generic grapher that does nothing
    """

    def collect(self, data_sorted, multiplier, calc_netsize):
        """
        do the magic calculations
        yields (x, netsize_y, rph_y)
        """
        total = 0
        hours = 0
        req_s = []
        netsize_s = []
        window = []
        for hour, val in data_sorted:
            years = hour / ( 365 * 24 )
            days = ( hour - years * 365 * 24 ) / 24
            hours = hour - ( ( years * 365 * 24 ) + ( days * 24 ) )
            hour = datetime.datetime.strptime('%0.4d_%0.3d_%0.2d' % (years, days, hours), '%Y_%j_%H')
            if val > 0:
                total += val
                hours += 1
            per_hour = float(total) / hours
            window.append(val)
            while len(window) > window_len:
                window.pop(0)
            mean = sum(window) / len(window)
            netsize = int(calc_netsize(mean, multiplier))
            yield (hour, netsize, val)

    def generate(self, data_sorted, multiplier, calc_netsize):
        """
        :param data_sorted: sorted list of (hour, hitcount) tuple
        :param multiplier: multiplier to use on graph Y axis
        :param calc_netsize: function that calculates the network size given a mean value and multiplier
        :return (netsize, requests) graph tuple:
        """

class SVGText:
    """
    svg hold text
    """
    def __init__(self, data='undefined'):
        self.data = data

    def render(self):
        return """<?xml version="1.0" standalone="no"?>
<svg viewBox="0 0 80 40" xmlns="http://www.w3.org/2000/svg">
<desc>fallback svg</desc>
<rect x="0" y="0" width="80" height="40" stroke="red" fill="None">
</rect>
<text x="30" y="20">{}</text>
</svg>
""".format(self.data)

class TextGrapher(Grapher):
    """
    generates svg manually that look like ass
    """

    def generate(self, data_sorted, multiplier, calc_netsize):
        nsize = 0
        rph = 0
        t = 0
        for hour, netsize, reqs in self.collect(data_sorted, multiplier, calc_netsize):
            t += 1
            nsize += netsize
            rph += reqs
        if t:
            nsize /= t
            rph /= t
        return SVGText("MEAN NETSIZE: {} routers".format(nsize)), SVGText("MEAN REQUETS: {} req/hour".format(rph))

class PygalGrapher(Grapher):
    """
    generates svg graphs using pygal
    """

    def generate(self, data_sorted, multiplier, calc_netsize):

        _netsize_graph = pygal.DateY(show_dots=False,x_label_rotation=20)
        _requests_graph = pygal.DateY(show_dots=False,x_label_rotation=20)

        _netsize_graph.title = 'Est. Network Size (multiplier: %d)' % multiplier
        _requests_graph.title = 'Requests Per Hour'

        netsize_s, req_s = list(), list()
        for hour, netsize, reqs in self.collect(data_sorted, multiplier, calc_netsize):
            netsize_s.append((hour, netsize))
            req_s.append((hour, reqs))

        _netsize_graph.add('Routers', netsize_s)
        _requests_graph.add('news.xml Requests', req_s)
        return _netsize_graph, _requests_graph


class StatsEngine:
    """
    Stats engine for news.xml
    """

    _log = logging.getLogger('StatsEngine')

    def __init__(self):
        self._cfg_fname = 'settings.json'
        if redis:
            self._db = RedisDB()
            try:
                self._db.exists('nothing')
            except:
                self._log.warn("failed to connect to redis, falling back to volatile stats backend")
                self._db = DictDB()
        else:
            self._db = DictDB()
        if pygal:
            self._graphs = PygalGrapher()
        else:
            self._graphs = TextGrapher()

        self._last_hour = self.get_hour()

    def _config_str(self, name):
        with open(self._cfg_fname) as f:
            return str(json.load(f)[name])

    def _config_int(self, name):
        with open(self._cfg_fname) as f:
            return int(json.load(f)[name])

    def multiplier(self):
        return self._config_int('mult')

    def tslice(self):
        return self._config_int('slice')

    def window_len(self):
        return self._config_int('winlen')

    @staticmethod
    def get_hour():
        """
        get the current hour as an int
        """
        dt = datetime.datetime.utcnow()
        return dt.hour + (int(dt.strftime('%j')) * 24 ) + ( dt.year * 24 * 365 )

    def calc_netsize(self, per_hour, mult):
        return float(per_hour) * 24 / 1.5 * mult

    @staticmethod
    def _hour_key(hour):
        return 'newsxml.hit.{}'.format(hour)

    def hit(self, lang=None):
        """
        record a request
        """
        hour = self.get_hour()
        keyname = self._hour_key(hour)
        if not self._db.exists(keyname):
            self._db.set(keyname, '0')
        val = self._db.get(keyname)
        self._db.set(keyname, str(int(val) + 1))

    def _load_data(self, hours):
        """
        load hit data
        """
        hour = self.get_hour()
        data = list()
        while hours > 0:
            keyname = self._hour_key(hour)
            val = self._db.get(keyname)
            if val:
                data.append((hour, int(val)))
            hour -= 1
            hours -= 1
        return data

    def regen_graphs(self, tslice, window_len, mult):
        data = self._load_data(tslice)
        data_sorted = sorted(data, key=operator.itemgetter(0))
        if len(data_sorted) > tslice:
            data_sorted = data_sorted[-tslice:]
        self._netsize_graph, self._requests_graph = self._graphs.generate(data_sorted, self.multiplier(), self.calc_netsize)



    def netsize(self, tslice, window, mult):
        #if not hasattr(self,'_netsize_graph'):
        self.regen_graphs(tslice, window, mult)
        return self._netsize_graph.render()

    def requests(self, tslice, window, mult):
        #if not hasattr(self,'_requests_graph'):
        self.regen_graphs(tslice, window, mult)
        return self._requests_graph.render()


engine = StatsEngine()
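A quick aside, not part of the diff: calc_netsize appears to read as requests per day (per_hour * 24) divided by an assumed average of roughly 1.5 news fetches per router per day, scaled by the multiplier loaded from settings.json. A worked example with assumed sample values:

```python
# Illustrative arithmetic for StatsEngine.calc_netsize (assumed sample values).
per_hour, mult = 100, 1               # 100 smoothed requests/hour, multiplier 1
print(per_hour * 24 / 1.5 * mult)     # -> 1600.0 estimated routers
```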