Mini Shell

Direktori : /opt/imh-python/lib/python3.9/site-packages/ngxutil/
Upload File :
Current File : //opt/imh-python/lib/python3.9/site-packages/ngxutil/api.py

# vim: set ts=4 sw=4 expandtab syntax=python:
"""

ngxutil.api
JSON & YAML interfaces

@author J. Hipps <jacobh@inmotionhosting.com>

"""

import os
import sys
import re
import logging
from time import time
from glob import glob

from ngxutil import default_cache_base
from ngxutil import logparse, influx
from ngxutil.vts import parse_vts
from ngxutil.cache import read_cache_file
from ngxutil.util import *

logger = logging.getLogger('ngxutil')


def get_server_info(include_logdata=False, noflux=False, tail=None, span=24, infile='/var/log/nginx/access.log'):
    """
    Return server-wide stats

    Thin wrapper around get_domain_info() using the wildcard
    domain '*', which aggregates results across all vhosts
    """
    opts = {
        'include_logdata': include_logdata,
        'noflux': noflux,
        'tail': tail,
        'span': span,
        'infile': infile,
    }
    return get_domain_info('*', **opts)

def get_cache_stats(zone, vhost=None, domain=None, cbase=default_cache_base):
    """
    Return stats about currently cached pages

    @zone:   cache zone name (first path component under @cbase)
    @vhost:  vhost name as it appears in cache keys (optional)
    @domain: domain to filter on; if falsy, every entry is counted
    @cbase:  base path of the Nginx cache directory
    Returns a dict: {'files': <entry count>, 'size': <total bytes>}
    """
    gpath = os.path.join(cbase, zone, '*', '*')
    tally = {'files': 0, 'size': 0}

    # Precompile the key-matching regex once, outside the loop.
    # re.escape() keeps metacharacters in the domain/vhost (eg. '.')
    # from being interpreted as pattern syntax; previously an unescaped
    # domain could silently match unrelated keys or break the pattern.
    # Also tolerate vhost=None, which used to raise AttributeError.
    kre = None
    if domain:
        names = [re.escape(domain.lower())]
        if vhost:
            names.append(re.escape(vhost.lower()))
        kre = re.compile(r'^https?(GET|HEAD|OPTIONS)(' + '|'.join(names) + r')/?.*$', re.I)

    for tfile in glob(gpath):
        cdata = read_cache_file(tfile)
        if kre is not None and not kre.match(cdata['key']):
            continue
        tally['files'] += 1
        tally['size'] += cdata['stat'].st_size
    return tally

def get_top(logdata=None, user=None, lastlines=10000, topcount=10, rresolve=True, infile='/var/log/nginx/access.log'):
    """
    Generate report of top hits
    If @logdata is not supplied, then a fresh read of the log is done,
    pulling the last @lastlines lines from the file
    """
    if not logdata:
        logdata = logparse.parse_log(infile, lastlines=lastlines)

    # geo-location fields are only present for some log sources;
    # probe the first entry to decide whether to tally locations
    if logdata and logdata[0].get('loc') is not None:
        top_loc = logparse.get_top_locations(logdata, topcount=topcount)
    else:
        top_loc = {}

    # Collect the per-category "top" tables and assemble the report
    top_hosts = logparse.get_top_hosts(logdata, topcount=topcount)
    top_uris = logparse.get_top_uris(logdata, czone=user, topcount=topcount, colorize=False)
    top_agents = logparse.get_top_agents(logdata, topcount=topcount)
    top_ips = logparse.get_top_ips(logdata, rresolve=rresolve, topcount=topcount)

    return {
        'host': list(top_hosts.values()),
        'uri': list(top_uris.values()),
        'agent': dict(top_agents),
        'loc': dict(top_loc),
        'ip': dict(top_ips),
        'status': logparse.get_status_hitrate(logdata)
    }

def get_domain_info(domain, vdata=None, include_logdata=False, noflux=False, tail=None, span=24, infile='/var/log/nginx/access.log'):
    """
    Return domain info for @domain
    If @domain is '*', return server-wide results

    @vdata:           pre-parsed VTS data (parsed fresh via parse_vts() if None)
    @include_logdata: include the raw parsed log entries in the output struct
    @noflux:          skip InfluxDB and force logparse input
    @tail:            explicit number of log lines to read (overrides @span)
    @span:            hours of history to query/parse
    @infile:          path to the Nginx access log
    Returns a stats dict, a dict with an 'error' key, or None on failure
    """
    tstart = time()
    if vdata is None:
        vdata = parse_vts()

    duser = get_domain_owner(domain)
    if not duser:
        return None

    try:
        tvd = vdata['serverZones'][duser['vhost']]
    except (KeyError, TypeError):
        # narrowed from a bare except:, which also swallowed
        # SystemExit and KeyboardInterrupt
        return {'error': "Failed to retrieve status data for domain '%s'. Data not yet available." % (domain)}

    # Get cache stats
    cstats = get_cache_stats(duser['owner'], duser['vhost'], domain)

    datasrc = None
    logdata = None

    # determine loglines to read: an explicit tail wins; otherwise a
    # time span means "read everything in the window" (llines=None);
    # with no span, read one line per request counted by VTS
    if tail is not None:
        llines = tail
    elif span:
        llines = None
    else:
        llines = vdata['connections']['requests']

    # Use data from InfluxDB series, if available
    if not noflux:
        datasrc = 'influxdb'
        logdata = influx.fetch_all(duser['vhost'].lower(), span)
        if logdata is None:
            logger.warning("Failed to retrieve data from InfluxDB. Aborting. Use --noflux to force logparse input.")
            return None

    if logdata is None:
        # Get access.log data, then filter it based on the vhost field
        try:
            logger.info("Parsing log '%s' (%s)...", infile, format_size(os.stat(infile).st_size))
        except OSError as e:
            # os.stat failures (missing file, bad perms) all raise OSError
            logger.error("Unable to stat input file: %s", str(e))
            sys.exit(100)
        aclog = logparse.parse_log(infile, span=span, lastlines=llines)
        logdata = logparse.filter_log(aclog, 'vhost', duser['vhost'].lower())
        datasrc = 'logparse'

    chits = logparse.get_cache_hitrate(logdata)

    # Get top stats
    topdata = get_top(logdata, user=duser['owner'], infile=infile)

    # Calculate time delta
    tdelta = time() - tstart

    # uptime in msec, computed once for both raw and formatted forms
    uptime_ms = vdata.get('nowMsec') - vdata.get('loadMsec')

    # Build final output struct
    odata = {
        'data': {
            'domain': domain,
            'user': duser['owner'],
            'vhost': duser['vhost'],
            'vts_server': {
                'uptime': uptime_ms,
                'uptime_fmt': format_uptime(uptime_ms / 1000.0),
                'hostname': vdata.get('hostName'),
                'version': vdata.get('nginxVersion')
            },
            'vts_domain': tvd,
            'top': topdata,
            'cache': {
                'stats': cstats,
                'hitrate': chits
            },
            'datasrc': datasrc,
            'timedelta': tdelta,
            'logdata': None
        }
    }

    if include_logdata:
        # fix: populate the 'logdata' placeholder inside 'data'; the
        # original set a stray top-level key, leaving data['logdata']
        # permanently None
        odata['data']['logdata'] = logdata

    return odata

Zerion Mini Shell 1.0