#!/usr/bin/env python3
############################################################################
#
# Parse DSL link stats for iiNet TG-1 & generate RRD archives & graphs
#
############################################################################
#
# Copyright (C) 2017 Daniel O'Connor. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED.  IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
############################################################################

import base64
import binascii
import bs4
import configparser
import json
import mechanize
import mysrp as srp
import argparse
import os
import os.path
import re
import requests
import rrdtool
import sys
import time
import urllib.request, urllib.parse, urllib.error
from functools import reduce

conf = configparser.ConfigParser()
conf.add_section('global')

conflist = []
if 'HOME' in os.environ:
    conflist.append(os.path.expanduser('~/.adslstats.ini'))
conf.read(conflist)

parser = argparse.ArgumentParser(usage='%(prog)s [options]')
parser.add_argument('-v', '--verbose', action='store_true', default=False,
                    help='Enable debug output')
parser.add_argument('-g', '--graph', action='store_true', default=False,
                    help='Generate a graph')
parser.add_argument('-u', '--update', action='store_true', default=False,
                    help='Update RRD')
parser.add_argument('-m', '--munin', action='store', default=None,
                    help='Output munin data for ARG')
parser.add_argument('-a', '--username', action='store',
                    help='Username to login to modem')
parser.add_argument('-p', '--password', action='store',
                    help='Password to login to modem')
parser.add_argument('-n', '--name', action='store',
                    help='Hostname of modem')
parser.add_argument('-b', '--base', action='store',
                    help='Base directory for RRD & PNGs')
parser.add_argument('-c', '--cookiejar', action='store',
                    help='Location of cookiejar')
parser.add_argument('rest', nargs = '*')

args = parser.parse_args()

# Fill in any option not supplied on the command line from the config file;
# command-line arguments take precedence over the ini file.
def opthelper(args, conf, optname):
    if getattr(args, optname) is None:
        if not conf.has_option('global', optname):
            parser.error(optname + ' must be specified in config file or via command line')
        else:
            setattr(args, optname, conf.get('global', optname))

opthelper(args, conf, 'username')
opthelper(args, conf, 'password')
opthelper(args, conf, 'name')
opthelper(args, conf, 'base')
opthelper(args, conf, 'cookiejar')
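# Illustrative ~/.adslstats.ini (section and option names match the code above;
# the values shown are placeholders, not real settings):
#   [global]
#   username = admin
#   password = secret
#   name = 10.0.0.138
#   base = ~/adslstats/dsl
#   cookiejar = ~/.adslstats-cookies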

# Expand path names
args.cookiejar = os.path.expanduser(args.cookiejar)
args.base = os.path.expanduser(args.base)

rrdname = "%s.rrd" % (args.base)
graphbasename = args.base

class DSLStats(object):
    def __str__(self):
        s = '''Line Rate - Up: %d kbit, Down %d kbit
Maximum Rate - Up: %d kbit, Down %d kbit
Noise Margin - Up: %.1f dB, Down %.1f dB
Attenuation - Up: %.1f dB, Down %.1f dB
Power - Up: %.1f dBm, Down %.1f dBm
Uptime - %d sec''' % (self.upstream, self.downstream,
                      self.upstreammax, self.downstreammax,
                      self.nmup, self.nmdown,
                      self.attenup, self.attendown,
                      self.uppower, self.downpower,
                      self.uptime)
        return s

def getstats():
    stats = DSLStats()
    base = 'http://%s' % (args.name)
    br = mechanize.Browser()
    #br.set_debug_http(True)
    #br.set_debug_responses(True)
    #br.set_debug_redirects(True)
    cj = mechanize.LWPCookieJar()
    if os.path.exists(args.cookiejar):
        cj.load(args.cookiejar, ignore_discard = True)
    br.set_cookiejar(cj)
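    # Try the stats page with any saved session cookie first; only fall back
    # to the SRP login if the page comes back with the login form instead.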
    if not fillstats(br, base, stats):
        if not authenticate(br, base, args.username, args.password):
            print('login failed')
            return None
        #print('login succeeded, getting stats')
        fillstats(br, base, stats)

    cj.save(args.cookiejar, ignore_discard = True)
    return stats

def authenticate(br, base, username, password):
    # Connect and authenticate
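    # The login is an SRP exchange (via the srp/mysrp module): post the
    # username (I) and client public value (A), get back the salt (s) and
    # server public value (B), then post the client proof (M) and check the
    # server's proof from the response.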
    r = br.open(base)
    bs = bs4.BeautifulSoup(r, 'lxml')
    token = bs.head.find(lambda tag: tag.has_attr('name') and tag['name'] == 'CSRFtoken')['content']
    #print('Got CSRF token ' + token)

    usr = srp.User(username, password, hash_alg = srp.SHA256, ng_type = srp.NG_2048)
    uname, A = usr.start_authentication()

    req = mechanize.Request(base + '/authenticate', data = urllib.parse.urlencode({'CSRFtoken' : token, 'I' : uname, 'A' : binascii.hexlify(A)}))
    r = br.open(req)
    j = json.loads(r.read())
    #print('Sent challenge, got ' + str(j))

    M = usr.process_challenge(binascii.unhexlify(j['s']), binascii.unhexlify(j['B']))
    req = mechanize.Request(base + '/authenticate', data = urllib.parse.urlencode({'CSRFtoken' : token, 'M' : binascii.hexlify(M)}))
    r = br.open(req)
    j = json.loads(r.read())
    #print('Got response ' + str(j))

    usr.verify_session(binascii.unhexlify(j['M']))
    if not usr.authenticated():
        print('Failed to authenticate')
        return False
    return True

def fillstats(br, base, stats):
    # Fetch stats and parse
    r = br.open(base + '/modals/broadband-bridge-modal.lp')
    bs = bs4.BeautifulSoup(r, 'lxml')
    if bs.find('div', 'login') is not None:
        return False

    # Helper function to extract data
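    # Each stat is a <label> whose next sibling holds one "<number> <unit>"
    # string per direction; getvals() returns the numeric parts scaled by mult
    # (e.g. 1e3 for the line rates, which appear to be reported in Mbps but
    # are stored as kbps).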
    def getvals(bs, text, mult = 1):
        subs = bs.findAll('label', text = text)[0].fetchNextSiblings()[0].strings
        tmp = [float(s.split()[0]) for s in subs]
        return [s * mult for s in tmp]

    stats.linkup = list(bs.findAll('label', text = 'DSL Status')[0].fetchNextSiblings()[0].strings)[0] == 'Up'

    stats.upstreammax, stats.downstreammax = getvals(bs, 'Maximum Line rate', 1e3)
    stats.upstream, stats.downstream = getvals(bs, 'Line Rate', 1e3)
    stats.uppower, stats.downpower = getvals(bs, 'Output Power')
    stats.nmup, stats.nmdown = getvals(bs, 'Noise Margin')

    # Line attenuation returns several values for each direction, parse specially and just take the first one
    upattens, downattens = list(bs.findAll('label', text = 'Line Attenuation')[0].fetchNextSiblings()[0].strings)
    stats.attenup = float(re.findall('([0-9.N/A]+)', upattens)[0])
    stats.attendown = float(re.findall('([0-9.N/A]+)', downattens)[0])

    # Convert something like '2days 17hours 28min 19sec' into seconds
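    # e.g. '2days 17hours 28min 19sec' -> ['2', '17', '28', '19']; reversed and
    # paired with mults this gives 19*1 + 28*60 + 17*3600 + 2*86400 = 235699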
    uptime = re.findall('([0-9]+)', list(bs.findAll('label', text = 'DSL Uptime')[0].fetchNextSiblings()[0].strings)[0])
    uptime.reverse() # End up with an array of seconds, minutes, hours, etc
    mults = [1, 60, 60 * 60, 24 * 60 * 60]
    if len(uptime) != len(mults):
        print('Unexpected number of uptime elements (%s)' % str(uptime))
        stats.uptime = None
    else:
        stats.uptime = reduce(lambda a, b: a + b, [int(a[0]) * a[1] for a in zip(uptime, mults)])

    return True

# Set up the RRD
# We expect data to be logged every 5 minutes
# Average 12 5-minute points -> hourly stats (keep 168 - a week's worth)
# Average 288 5-minute points -> daily stats (keep 1825 - 5 years' worth)
# Determine minimum & maximum for an hour and keep a week's worth.
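# Each DS is DS:<name>:<type>:<heartbeat>:<min>:<max>, e.g. the upstream rate
# accepts 32..150000 kbit and becomes unknown if no update arrives within 3600 s.
# RRA arithmetic: 12 x 5 min = 1 hour, 168 rows = 7 days; 288 x 5 min = 1 day,
# 1825 rows = 5 years.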
def makerrd(filename):
    rrdtool.create(filename,
                   '--step', '300',
                   'DS:upstream:GAUGE:3600:32:150000',   # Upstream (kbits)
                   'DS:downstream:GAUGE:3600:32:150000', # Downstream (kbits)
                   'DS:upstreammax:GAUGE:3600:32:150000',   # Upstream maximum (kbits)
                   'DS:downstreammax:GAUGE:3600:32:150000', # Downstream maximum (kbits)
                   'DS:nmup:GAUGE:3600:0:100',          # Upstream Noise margin (dB)
                   'DS:nmdown:GAUGE:3600:0:100',        # Downstream Noise margin (dB)
                   'DS:attenup:GAUGE:3600:0:100',       # Upstream Attenuation (dB)
                   'DS:attendown:GAUGE:3600:0:100',     # Downstream Attenuation (dB)
                   'DS:fecATUC:DERIVE:3600:0:U',		# Upstream FEC error count
                   'DS:fecATUR:DERIVE:3600:0:U',		# Downstream FEC error count
                   'DS:powerup:GAUGE:3600:-100:100',    # Upstream Power (dBm)
                   'DS:powerdown:GAUGE:3600:-100:100',  # Downstream Power (dBm)
                   'DS:uptime:DERIVE:3600:0:U',			# Uptime (seconds)
                   'RRA:AVERAGE:0.1:12:168',
                   'RRA:AVERAGE:0.1:288:1825',
                   'RRA:MIN:0.1:12:168',
                   'RRA:MAX:0.1:12:168')

# Update the RRD (format stats as expected)
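# The colon-separated fields must follow the DS order defined in makerrd();
# the FEC error counters are not scraped from this page, so they are sent
# as 'U' (unknown).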
def updaterrd(filename, tstamp, stats):
    rrdtool.update(filename,
                   '%d:%d:%d:%d:%d:%f:%f:%f:%f:U:U:%f:%f:%d' % (
                       tstamp,
                       stats.upstream,
                       stats.downstream,
                       stats.upstreammax,
                       stats.downstreammax,
                       stats.nmup,
                       stats.nmdown,
                       stats.attenup,
                       stats.attendown,
                       stats.uppower,
                       stats.downpower,
                       stats.uptime))

# Open the URL and call the parser
def getdata():
    stats = getstats()
    return stats

# Generate a graph
def gengraph():
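    # Each series is drawn as a min/max band: a hairline LINE at the MIN value,
    # the MAX-MIN difference STACKed on top as a shaded AREA, and the AVERAGE
    # drawn as an ordinary line over the band.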

    linkargs = (
        '-a', 'SVG',
        '-X', '0',
        '-l', '0',
        '--vertical-label', 'kbit/sec',
        '--slope-mode',

        'DEF:upstream=%s:upstream:AVERAGE' % rrdname,
        'DEF:upstreammin=%s:upstream:MIN' % rrdname,
        'DEF:upstreammax=%s:upstream:MAX' % rrdname,
        'CDEF:upstreamdif=upstreammax,upstreammin,-',
        'DEF:maxupstream=%s:upstreammax:AVERAGE' % rrdname,

        'LINE0.001:upstreammin#000000:',
        'AREA:upstreamdif#00dc76::STACK',
        'LINE1:upstream#00ff00:Upstream',

        'LINE1:maxupstream#0000ff:Upstream (maximum)',

        'DEF:downstream=%s:downstream:AVERAGE' % rrdname,
        'DEF:downstreammin=%s:downstream:MIN' % rrdname,
        'DEF:downstreammax=%s:downstream:MAX' % rrdname,
        'CDEF:downstreamdif=downstreammax,downstreammin,-',
        'DEF:maxdownstream=%s:downstreammax:AVERAGE' % rrdname,

        'LINE0.001:downstreammin#000000:',
        'AREA:downstreamdif#ff8686::STACK',
        'LINE1:downstream#ff0000:Downstream',

        'LINE1:maxdownstream#000000:Downstream (maximum)'
    )

    signalargs = (
        '-a', 'SVG',
        '--vertical-label', 'dB',
        '--slope-mode',
        '-l', '0',

        'DEF:nmup=%s:nmup:AVERAGE' % rrdname,
        'DEF:nmupmin=%s:nmup:MIN' % rrdname,
        'DEF:nmupmax=%s:nmup:MAX' % rrdname,

        'CDEF:nmupdif=nmupmax,nmupmin,-',

        'LINE0.001:nmupmin#000000:',
        'AREA:nmupdif#5c5cff::STACK',
        'LINE1:nmup#0000ff:Noise Margin - Up',

        'DEF:nmdown=%s:nmdown:AVERAGE' % rrdname,
        'DEF:nmdownmin=%s:nmdown:MIN' % rrdname,
        'DEF:nmdownmax=%s:nmdown:MAX' % rrdname,

        'CDEF:nmdowndif=nmdownmax,nmdownmin,-',

        'LINE0.001:nmdownmin#000000:',
        'AREA:nmdowndif#009a00::STACK',
        'LINE1:nmdown#00ff00:Noise Margin - Down',

        'DEF:attenup=%s:attenup:AVERAGE' % rrdname,
        'DEF:attenupmin=%s:attenup:MIN' % rrdname,
        'DEF:attenupmax=%s:attenup:MAX' % rrdname,

        'CDEF:attenupdif=attenupmax,attenupmin,-',

        'LINE0.001:attenupmin#000000:',
        'AREA:attenupdif#f98100::STACK',
        'LINE1:attenup#ff0000:Attenuation - Up',

        'DEF:attendown=%s:attendown:AVERAGE' % rrdname,
        'DEF:attendownmin=%s:attendown:MIN' % rrdname,
        'DEF:attendownmax=%s:attendown:MAX' % rrdname,

        'CDEF:attendowndif=attendownmax,attendownmin,-',

        'LINE0.001:attendownmin#000000:',
        'AREA:attendowndif#aaaaaa::STACK',
        'LINE1:attendown#000000:Attenuation - Down',

        'DEF:powerup=%s:powerup:AVERAGE' % rrdname,
        'DEF:powerupmin=%s:powerup:MIN' % rrdname,
        'DEF:powerupmax=%s:powerup:MAX' % rrdname,

        'CDEF:powerupdif=powerupmax,powerupmin,-',

        'LINE0.001:powerupmin#000000:',
        'AREA:powerupdif#804007::STACK',
        'LINE1:powerup#ff800e:Power - Up (dBm)',

        'DEF:powerdown=%s:powerdown:AVERAGE' % rrdname,
        'DEF:powerdownmin=%s:powerdown:MIN' % rrdname,
        'DEF:powerdownmax=%s:powerdown:MAX' % rrdname,

        'CDEF:powerdowndif=powerdownmax,powerdownmin,-',

        'LINE0.001:powerdownmin#000000:',
        'AREA:powerdowndif#604872::STACK',
        'LINE1:powerdown#c090e5:Power - Down (dBm)',

        'DEF:uptime=%s:uptime:AVERAGE' % rrdname,
        'CDEF:uptimepct=uptime,10,*',
        'LINE1:uptimepct#606060:Uptime (10\'s%)',
        )

    rrdtool.graph("%s-hour-link.svg" % (graphbasename),
                  '--width', '768',
                  '--height', '256',
                  '--start', 'end - 7d',
                  '--end', 'now',
                  *linkargs)

    rrdtool.graph("%s-daily-5y-link.svg" % (graphbasename),
                  '--width', '768',
                  '--height', '256',
                  '--start', 'end - 1825d',
                  '--end', 'now',
                  *linkargs)

    rrdtool.graph("%s-daily-link.svg" % (graphbasename),
                  '--width', '768',
                  '--height', '256',
                  '--start', 'end - 365d',
                  '--end', 'now',
                  *linkargs)


    rrdtool.graph("%s-hour-signal.svg" % (graphbasename),
                  '--width', '768',
                  '--height', '256',
                  '--start', 'end - 7d',
                  '--end', 'now',
                  *signalargs)

    rrdtool.graph("%s-daily-signal.svg" % (graphbasename),
                  '--width', '768',
                  '--height', '256',
                  '--start', 'end - 365d',
                  '--end', 'now',
                  *signalargs)

    rrdtool.graph("%s-daily-5y-signal.svg" % (graphbasename),
                  '--width', '768',
                  '--height', '256',
                  '--start', 'end - 1825d',
                  '--end', 'now',
                  *signalargs)

if __name__ == "__main__":
    names = ['Noise Margin (up)', 'Noise Margin (down)', 'Attenuation (up)', 'Attenuation (down)']
    if args.munin is not None:
        # Handle the wrapper passing us its $0 as our $1
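        # munin plugins are typically installed as symlinks whose name ends in
        # the data set, e.g. something like adslstats_signal or adslstats_sync
        # (illustrative names); everything after the last '_' picks the data set.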
        args.munin = args.munin.split('_')[-1]
        if args.munin not in ['signal', 'sync']:
            print("Unknown data type ", args.munin)
            sys.exit(1)
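        # munin runs a plugin with the argument 'config' to fetch the graph
        # definition and with no argument to fetch the current values.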
        if len(args.rest) > 0:
            if args.rest[0] == 'config':
                if args.munin == 'signal':
                    print('''graph_category adsl
graph_title DSL Signal Quality
graph_args --base 1000 -l 0
graph_vlabel dB''')
                    for n in names:
                        name = n.translate(str.maketrans('', '', ' ()')).lower()
                        print('''%s.label %s
%s.type GAUGE
%s.max 100
%s.min 0''' % (name, n, name, name, name))
                elif args.munin == 'sync':
                    print('''graph_category adsl
graph_title DSL Sync Speed
graph_args --base 1024 -l 0
graph_vlabel kbit/sec
up.label Up
up.type GAUGE
up.max 150000
up.min 0
down.label Down
down.type GAUGE
down.max 150000
down.min 0
upmax.label Up (max)
upmax.type GAUGE
upmax.max 150000
upmax.min 0
downmax.label Down (max)
downmax.type GAUGE
downmax.max 150000
downmax.min 0''')
            sys.exit(0)
    if args.update or args.munin is not None:
        stats = getdata()
        if args.verbose:
            if stats is None:
                print("Modem is offline")
            else:
                print(stats)
    if (args.update or args.munin is not None) and stats is not None:
        if args.update:
            try:
                os.stat(rrdname)
            except OSError as e:
                if e.errno == 2:
                    print("rrd not found, creating..")
                    makerrd(rrdname)
            updaterrd(rrdname, int(time.time()), stats)
        if args.munin is not None:
            if args.munin == 'signal':
                print('''noisemarginup.value %.1f
noisemargindown.value %.1f
attenuationup.value %.1f
attenuationdown.value %.1f''' % (stats.nmup, stats.nmdown, stats.attenup, stats.attendown))
            elif args.munin == 'sync':
                s = '''up.value %.1f
down.value %.1f\n''' % (stats.upstream, stats.downstream)
                if hasattr(stats, 'upstreammax'):
                    s += '''upmax.value %.1f
downmax.value %.1f''' % (stats.upstreammax, stats.downstreammax)
                print(s)
    if args.graph:
        gengraph()