# NOTE: the following lines were non-Python residue from a web file-manager
# listing; commented out so the module parses:
# Mini Shell
# Direktori : /opt/imh-python/lib/python3.9/site-packages/ngxstats/
# Upload File :
# Current File : //opt/imh-python/lib/python3.9/site-packages/ngxstats/stats_app.py

# vim: set ts=4 sw=4 expandtab syntax=python:
"""
ngxstats.stats_app
Realtime Nginx stats aggregation tool
Flask App for Stats API

Copyright (c) 2019-2020 InMotion Hosting, Inc.
https://www.inmotionhosting.com/

@author J. Hipps <jacobh@inmotionhosting.com>
"""

import os
import platform
import re
import pwd
import json
import random
import string
import base64
import logging
import logging.handlers
from time import time
from functools import wraps

import arrow
import psycopg2
from psycopg2.extras import RealDictCursor
from psycopg2.extensions import (
    QueryCanceledError,
    STATUS_READY,
    STATUS_BEGIN,
    TRANSACTION_STATUS_INERROR,
)
from flask import Flask, Blueprint, request, make_response, jsonify

from ngxstats.util import gconf
from ngxstats import __version__, __api_version__


# time            | timestamp with time zone | not null |         | plain
# remote_addr     | inet                     | not null |         | main
# remote_user     | character varying(64)    |          |         | extended
# request         | text                     | not null |         | extended
# rstatus         | smallint                 | not null |         | plain
# body_bytes_sent | integer                  | not null | 0       | plain
# referer         | text                     |          |         | extended
# user_agent      | character varying(256)   |          |         | extended
# geoip_country   | character varying(64)    |          |         | extended
# geoip_city      | character varying(64)    |          |         | extended
# request_time    | double precision         | not null | 0.0     | plain
# proto           | character(16)            | not null |         | extended
# ucs             | character(20)            |          |         | extended
# xpc             | character(20)            |          |         | extended
# uct             | double precision         | not null | 0.0     | plain
# gz              | double precision         |          |         | plain
# tls             | character(10)            |          |         | extended
# cipher          | character varying(64)    |          |         | extended
# scheme          | character(8)             | not null |         | extended
# eorigin         | character(8)             |          |         | extended
# server_name     | character varying(256)   | not null |         | extended
# http_host       | character varying(256)   | not null |         | extended
# hostname        | character varying(256)   | not null |         | extended
# connection_id   | bigint                   | not null |         | plain


# id           | integer                | not null | nextval('users_id_seq'::regclass) | plain
# username     | character varying(64)  | not null |                                   | extended
# domains      | character varying[]    | not null |                                   | extended
# access_level | t_access               | not null | 'user'::t_access                  | plain
# sharedkey    | character(64)          | not null |                                   | extended
# server_name  | character varying(256) | not null |                                   | extended


# Ordered access levels; the tuple index doubles as the privilege rank
# used by login_required() for minimum-access comparisons
ACL_ENUM = ('user', 'srvadmin', 'superadmin')
# Count-based series definitions: field name -> SQL fragments
# ('series' column to break out by, SELECT 'op', and GROUP BY column)
SERIES_COUNT = {
    'count': {'series': None, 'op': 'count(*)', 'group': None},
    'rstatus': {
        'series': 'rstatus',
        'op': 'rstatus, count(*)',
        'group': 'rstatus',
    },
    'http_host': {
        'series': 'http_host',
        'op': 'http_host, count(*)',
        'group': 'http_host',
    },
    'xpc': {'series': 'xpc', 'op': 'xpc, count(*)', 'group': 'xpc'},
}
# Statistical series definitions (avg / median per time bucket)
SERIES_STAT = {
    'request_time': {
        'field': ['avg', 'median'],
        'series_field': None,
        'op': 'count(*)',
        'group': 'avg(request_time), percentile_cont(0.5) within group (order by request_time) as median',
    },
}
# Fields included in aggregate summaries
AGGRO_SUMMARY = ('xpc', 'rstatus', 'geoip_country')
# access_log columns that may be used in aggregation queries
AGGRO_FIELDS = (
    'remote_addr',
    'request',
    'rstatus',
    'referer',
    'user_agent',
    'geoip_country',
    'geoip_city',
    'proto',
    'xpc',
    'tls',
    'cipher',
    'scheme',
    'eorigin',
    'server_name',
    'http_host',
)
# This host's fully-qualified name (stored in the users.server_name column)
FQDN = platform.node()

logger = logging.getLogger('ngxstats')
# Auth token cache: token -> (username, domains, access_level, timestamp)
authcache = {}
# Module-global Postgres connection and cursor, established by pg_connect()
con = None
cur = None

app = Flask('ngxstats')
# All routes live on this blueprint; registered under /v1 (and /) at startup
v1 = Blueprint('v1', 'v1')

def initialize_app():
    """Setup and run the Flask app.

    Parses config, connects to Postgres, routes Flask's logging through
    the ngxstats logger, registers routes, then blocks in app.run().
    """
    gconf.parse_config()
    # app.config['SERVER_NAME'] = gconf.stats_server
    pg_connect()

    # Replace Flask's default log handler with the ngxstats logger.
    # This must happen BEFORE app.run(), which blocks until the server
    # exits -- previously this setup was unreachable while serving.
    from flask.logging import default_handler

    app.logger.removeHandler(default_handler)
    app.logger = logging.getLogger('ngxstats')

    app.register_blueprint(v1, url_prefix='/v1')
    app.register_blueprint(v1, url_prefix='/')
    app.run(
        debug=gconf.stats_debug, host=gconf.stats_host, port=gconf.stats_port
    )


def pg_connect():
    """
    Establish the module-global Postgres connection and cursor.

    Applies the configured statement timeout to every query. Terminates
    the process immediately on connection failure, since the app cannot
    operate without a database.
    """
    global con, cur

    try:
        timeout = int(gconf.stats_query_timeout)
        dsn = "{} options='-c statement_timeout={}s'".format(
            gconf.pg_socket, timeout
        )
        con = psycopg2.connect(dsn)
    except Exception as e:
        logger.critical(
            "stats_app: failed to connect to Postgres server: %s", str(e)
        )
        os._exit(1)
    logger.info("stats_app: connected to Postgres")

    cur = con.cursor(cursor_factory=RealDictCursor)


def ensure_pg_status():
    """
    Verify the shared connection is usable before running a query.

    Reconnects when the connection is in an unexpected state, or rolls
    back a previously failed transaction so new statements can run.
    """
    healthy = con.status in (STATUS_READY, STATUS_BEGIN)
    if not healthy:
        logger.debug("ensure_pg_status: reconnecting to database")
        pg_connect()
        return
    if con.get_transaction_status() == TRANSACTION_STATUS_INERROR:
        logger.debug("ensure_pg_status: rolling back previous transaction")
        con.rollback()


def write_userconf(user, ukey):
    """
    Persist (or remove) the per-user API config at ~user/.ngxstats

    If @ukey is None the config file is removed instead of written.
    Returns True on success, False on any failure (which is logged).
    """
    udata = {
        'user': user,
        'key': ukey,
        'server': FQDN,
        'endpoint': gconf.stats_endpoint,
    }
    fpath = os.path.realpath(os.path.join('/home', user, '.ngxstats'))

    if ukey is None:
        # Removal mode: drop the existing config file
        try:
            os.unlink(fpath)
            return True
        except Exception as e:
            logger.warning(
                "stats_app: write_userconf: failed to remove config [%s]: %s",
                fpath,
                str(e),
            )
            return False

    try:
        with open(fpath, 'w', encoding='utf-8') as f:
            json.dump(udata, f)
        # Hand ownership to the user; keep it owner/group-readable only
        upwd = pwd.getpwnam(user)
        os.chown(fpath, upwd.pw_uid, upwd.pw_gid)
        os.chmod(fpath, 0o0640)
        return True
    except Exception as e:
        logger.error(
            "stats_app: write_userconf: failed to write config [%s]: %s",
            fpath,
            str(e),
        )
        return False


def generate_key():
    """
    Generate a random 64-character base64-encoded shared key.

    These keys are authentication credentials (users.sharedkey), so the
    characters are drawn from the `secrets` CSPRNG rather than the
    non-cryptographic `random` module used previously.
    """
    import secrets

    alphanum = string.ascii_letters + string.digits
    raw = ''.join(secrets.choice(alphanum) for _ in range(64))
    # b64 of 64 alphanumeric bytes, truncated to a 64-char key
    return base64.b64encode(raw.encode('utf8'))[:64].decode('utf8')


def verify_token(token):
    """
    Verify authentication token
    Returns a tuple of (username, domains, access_level) or None
    Token format: 'username:sharedkey'

    Once a valid token is provided, it will be cached for 'stats_authcache' seconds
    authcache format: [token](username, domains, access_level, timestamp)
    """
    # Check to see if token is saved in authcache
    if token in authcache:
        tcache = authcache[token]
        # Ensure cached token still valid
        if time() - tcache[3] > gconf.stats_authcache:
            logger.debug(
                "stats_app: verify_token: authcache expired for %s", tcache[0]
            )
            del authcache[token]
        else:
            return tcache[:3]

    # A malformed token is an auth failure, not a DB error; previously
    # the ValueError here was caught below and triggered a pointless
    # database reconnect on every bad token
    try:
        user, psk = token.split(':', 1)
    except ValueError:
        logger.debug("stats_app: verify_token: malformed token")
        return None

    # If nothing cached, or cache invalid, query the DB
    ensure_pg_status()
    try:
        cur.execute(
            "SELECT access_level,domains FROM users WHERE username = %s AND sharedkey = %s;",
            (user, psk),
        )
        rez = cur.fetchone()
    except Exception as e:
        logger.warning("stats_app: verify_token: fetch failed: %s", str(e))
        # Reconnect only on an actual database error
        pg_connect()
        return None

    # No matching user/key: plain auth failure; previously the None
    # result raised TypeError and also forced a reconnect
    if rez is None:
        logger.debug("stats_app: verify_token: no match for user %s", user)
        return None

    alevel = rez['access_level']
    adomains = rez['domains']
    authcache[token] = (user, adomains, alevel, time())
    logger.debug(
        "stats_app: verify_token: user %s authenticated; caching for %d sec",
        user,
        gconf.stats_authcache,
    )
    return (user, adomains, alevel)


def auth_error():
    """
    Return a 401 response instructing the client to use Token auth
    """
    body = jsonify(error='Authorization required')
    res = make_response((body, 401))
    res.headers['WWW-Authenticate'] = "Token realm=ngxstats"
    return res


def login_required(min_access='user', passthru=False):
    """
    Decorator to ensure user is logged in
    Will call auth_error() on failure
    @min_access specifies the minimum access level (user < srvadmin < superadmin)
    If @passthru is True, then 'a_user', 'a_domains' and 'a_access' are
    injected into the decorated function's kwargs
    """

    def wrapper(f):
        @wraps(f)
        def decorated(*args, **kwargs):
            # No Authorization header at all -> 401
            if not request.headers.get('Authorization'):
                return auth_error()

            # Expected header format: "<type> <token>"; the type is ignored
            try:
                _atype, atoken = request.headers['Authorization'].split(' ', 1)
            except Exception as e:
                logger.debug(
                    "stats_app: failed to parse Authorization header: %s",
                    str(e),
                )
                return auth_error()

            # Resolve token -> (username, domains, access_level)
            auser = verify_token(atoken)
            if auser is None:
                return auth_error()

            try:
                # Privilege rank comparison via position in ACL_ENUM
                if ACL_ENUM.index(auser[2]) >= ACL_ENUM.index(min_access):
                    if passthru is True:
                        kwargs['a_user'] = auser[0]
                        kwargs['a_domains'] = auser[1]
                        kwargs['a_access'] = auser[2]
                    return f(*args, **kwargs)
                logger.debug(
                    "stats_app: user %s has insufficient access_level for this function",
                    auser[0],
                )
            except Exception as e:
                logger.warning(
                    "stats_app: failed to compare user[%s] access level [%s]: %s",
                    auser,
                    min_access,
                    str(e),
                )
                # NOTE(review): re-raising here surfaces an unrecognized
                # access level as an HTTP 500 instead of falling through
                # to auth_error() (401) -- confirm this is intentional
                raise e
            # Sufficient auth but insufficient access level -> 401
            return auth_error()

        return decorated

    return wrapper


def params(arglist=None, optional=None, use_json=False):
    """
    Decorator to transform params to function args

    @arglist: required parameter names; request fails with 400 if absent
    @optional: parameter names copied through only when present
    If @use_json is True, JSON is decoded from request data
    Otherwise, request GET/POST args are used
    """
    if arglist is None:
        arglist = []
    if optional is None:
        optional = []

    def wrapper(f):
        @wraps(f)
        def decorated(*args, **kwargs):
            if use_json:
                try:
                    ijson = request.get_json()
                except Exception as e:
                    logger.error(
                        "Invalid JSON data or missing request Content-Type: %s",
                        e,
                    )
                    ijson = None
                # get_json() returns None for an empty/absent body; the
                # optional-arg loop below previously crashed with an
                # uncaught AttributeError (HTTP 500) in that case
                if ijson is None:
                    return (
                        jsonify(
                            error="Invalid JSON data or missing request Content-Type"
                        ),
                        400,
                    )

            for targ in arglist:
                try:
                    if use_json:
                        kwargs[targ] = ijson[targ]
                    else:
                        kwargs[targ] = request.args[targ]
                except Exception as e:
                    logger.error(
                        "Request missing required argument %r: %s", targ, e
                    )
                    return (
                        jsonify(error=f"Missing required argument {targ!r}"),
                        400,
                    )
            for targ in optional:
                if use_json:
                    if ijson.get(targ):
                        kwargs[targ] = ijson[targ]
                else:
                    if request.args.get(targ):
                        kwargs[targ] = request.args.get(targ)
            return f(*args, **kwargs)

        return decorated

    return wrapper


###############################################################################
# Routes
#


@v1.route('/admin/user/<user>', methods=['GET'])
@login_required(min_access='superadmin')
def r_admin_user_get(user=None):
    """
    GET /admin/user/<user>
    Fetch existing user credentials

    Requires superadmin access. Returns the full users-table row;
    404 if no such user, 504 on query timeout.
    """
    # Verify the shared connection is healthy before querying -- every
    # other DB route does this; it was missing here
    ensure_pg_status()
    try:
        cur.execute("SELECT * FROM users WHERE username = %s", (user,))
        tuser = cur.fetchall()[0]
    except QueryCanceledError as e:
        logger.error("stats_app: AdminUser::get: Query timed out: %s", e)
        return (
            jsonify(status="timeout", error=f"timed out: {e}"),
            504,
        )
    except Exception:
        # fetchall()[0] raises IndexError when no row matched
        logger.error("stats_app: AdminUser::get: User %r not found", user)
        return (jsonify(status="failed", error="User not found"), 404)
    return (jsonify(status="ok", user=tuser), 200)


@v1.route('/admin/user/<user>', methods=['PUT'])
@login_required(min_access='srvadmin', passthru=True)
@params(['domains'], optional=['access_level'], use_json=True)
def r_admin_user_create(
    user=None,
    domains=None,
    access_level='user',
    a_user=None,
    a_access=None,
    a_domains=None,
):
    """
    PUT /admin/user/<user>
    Create new user

    JSON body: required 'domains' (list), optional 'access_level'
    (one of ACL_ENUM). Generates a fresh shared key, inserts the user
    row, and writes the key to the user's ~/.ngxstats config.
    Requires srvadmin access; srvadmin may only create 'user'-level
    accounts.
    """
    # srvadmin users can only create users
    if a_access == 'srvadmin' and access_level != 'user':
        return (
            jsonify(
                status="badauth",
                error='Current access level can only create regular users',
            ),
            401,
        )

    # Ensure 'domains' is a list
    if not isinstance(domains, list):
        return (
            jsonify(status="failed", error='domains value should be a list'),
            400,
        )

    # Ensure access_level is correct
    if access_level not in ACL_ENUM:
        logger.debug(
            "stats_app: AdminUser::put: access_level '%s' is invalid",
            access_level,
        )
        return (jsonify(status="failed", error='access_level is invalid'), 400)

    # Generate key and assemble query
    psk = generate_key()
    udata = (user, domains, access_level, psk, FQDN)

    # write to database
    # (id, username, domains[], access_level, sharedkey, server_name)
    ensure_pg_status()
    try:
        # DEFAULT lets the id sequence assign the primary key
        cur.execute(
            "INSERT INTO users VALUES(DEFAULT, %s, %s, %s, %s, %s)", udata
        )
        con.commit()
    except QueryCanceledError as e:
        logger.error("stats_app: AdminUser::put: Query timed out: %s", e)
        return (
            jsonify(status="timeout", error=f"timed out: {e}"),
            504,
        )
    except Exception as e:
        logger.error("stats_app: AdminUser::put: insert failed: %s", e)
        return (
            jsonify(status="failed", error="Failed to insert user in database"),
            503,
        )

    # XXX-TODO: Add user id to response object
    # Write key to user's homedir
    if not write_userconf(user, psk):
        # User row was committed above; only the homedir config failed
        return (
            jsonify(
                status="failed",
                error="User created, but failed to write config to user homedir",
            ),
            503,
        )

    # Echo the created record (including the new shared key) back
    return jsonify(
        status="ok",
        user={
            'username': udata[0],
            'domains': udata[1],
            'access_level': udata[2],
            'sharedkey': udata[3],
            'server_name': udata[4],
        },
    )


@v1.route('/admin/user/<user>', methods=['POST'])
@login_required(min_access='srvadmin', passthru=True)
@params(['domains'], optional=['access_level'], use_json=True)
def r_admin_user_update(
    user=None,
    domains=None,
    access_level='user',
    a_user=None,
    a_access=None,
    a_domains=None,
):
    """
    POST /admin/user/<user>
    Update an existing user; also generates and writes a new key

    JSON body: required 'domains' (list), optional 'access_level'
    (one of ACL_ENUM). Requires srvadmin access; srvadmin may only
    manage 'user'-level accounts.
    """
    # srvadmin users can only create users
    if a_access == 'srvadmin' and access_level != 'user':
        return make_response(
            jsonify(error='Current access level can only create regular users'),
            401,
        )

    # Ensure access_level is correct
    if access_level not in ACL_ENUM:
        logger.debug(
            "stats_app: AdminUser::post: access_level '%s' is invalid",
            access_level,
        )
        return (jsonify(status="failed", error='access_level is invalid'), 400)

    # XXX-TODO: pull existing entry to ensure srvadmin can't modify superadmin

    # Generate key and assemble query
    psk = generate_key()
    udata = (domains, access_level, psk, FQDN, user)

    # update in database
    # (id, username, domains[], access_level, sharedkey, server_name)
    ensure_pg_status()
    try:
        # The WHERE clause must match the 'username' column; the previous
        # 'WHERE user = %s' compared the reserved USER keyword (the
        # connected DB role) with the username, never matching any row.
        cur.execute(
            "UPDATE users SET domains = %s, access_level = %s, sharedkey = %s, server_name = %s WHERE username = %s",
            udata,
        )
        con.commit()
    except QueryCanceledError as e:
        logger.error("stats_app: AdminUser::post: Query timed out: %s", e)
        return (
            jsonify(status="timeout", error=f"timed out: {e}"),
            504,
        )
    except Exception as e:
        logger.error("stats_app: AdminUser::post: update failed: %s", e)
        return (
            jsonify(status="failed", error="Failed to update user in database"),
            503,
        )

    # Write key to user's homedir
    if not write_userconf(user, psk):
        return (
            jsonify(
                status="failed",
                error="User updated, but failed to write config to user homedir",
            ),
            503,
        )

    # Echo the updated record (including the new shared key) back
    return jsonify(
        status="ok",
        user={
            'username': udata[4],
            'domains': udata[0],
            'access_level': udata[1],
            'sharedkey': udata[2],
            'server_name': udata[3],
        },
    )


@v1.route('/admin/user/<user>', methods=['DELETE'])
@login_required(min_access='srvadmin', passthru=True)
def r_admin_user_delete(user=None, a_user=None, a_access=None, a_domains=None):
    """
    DELETE /admin/user/<user>
    Delete an existing user and remove their ~/.ngxstats config
    """
    # XXX-TODO: pull existing entry to ensure srvadmin can't modify superadmin
    ensure_pg_status()
    try:
        # The WHERE clause must match the 'username' column; the previous
        # 'WHERE user = %s' compared the reserved USER keyword (the
        # connected DB role) with the username, never deleting the row.
        cur.execute("DELETE FROM users WHERE username = %s", (user,))
        con.commit()
    except QueryCanceledError as e:
        logger.error("stats_app: AdminUser::delete: Query timed out: %s", e)
        return (
            jsonify(status="timeout", error=f"timed out: {e}"),
            504,
        )
    except Exception as e:
        logger.error("stats_app: AdminUser::delete: update failed: %s", str(e))
        return (
            jsonify(
                status="failed",
                error="Failed to find or update user in database",
            ),
            404,
        )

    # Delete key from homedir (best-effort; failure is only logged)
    write_userconf(user, None)
    return (jsonify(status="ok"), 200)


@v1.route('/user/ping', methods=['GET'])
@login_required()
def r_user_ping():
    """
    Liveness + auth check: returns PONG and version info if the server
    is up and authentication is successful
    """
    payload = jsonify(
        status="ok",
        pong=True,
        version=__version__,
        api_version=__api_version__,
    )
    return (payload, 200)


@v1.route('/requests/<domain>', methods=['GET'])
@login_required(passthru=True)
@params(optional=['start', 'stop'])
def r_requests_get(
    domain=None,
    start=-21600,
    stop=0,
    a_user=None,
    a_access=None,
    a_domains=None,
):
    """
    GET /requests/<domain>?start=-21600&stop=0
    Return list of requests between @start and @stop span for specified @domain
    @start is start time in seconds since UNIX epoch OR if negative number,
        offset from current time
    @stop is stop time in seconds since UNIX epoch OR if negative number, offset
        from current time OR `0` for current time
    """
    # Regular users may only query domains in their own ACL
    if a_access == 'user':
        try:
            if domain not in a_domains:
                logger.warning(
                    "stats_app: RequestsList::get: user %s does not have access to domain %s",
                    a_user,
                    domain,
                )
                return (
                    jsonify(
                        status="authfail",
                        error=f"User does not have access to domain {domain!r}",
                    ),
                    401,
                )
        except Exception:
            logger.error(
                "stats_app: RequestsList::get: unable to determine user %s access to domain %s",
                a_user,
                domain,
            )
            return (
                jsonify(
                    status="authfail",
                    error=f"Unable to determine user access to domain {domain!r}",
                ),
                401,
            )

    # Resolve @start and @stop independently: values <= 0 are offsets
    # back from the current time (0 meaning "now"); positive values are
    # absolute epoch timestamps. Previously @stop was only offset-based
    # when @start was negative (so a positive @start with the default
    # stop=0 produced a window ending at the 1970 epoch, always empty),
    # and a negative @stop was added to "now" instead of subtracted.
    try:
        tnow = arrow.now().int_timestamp
        istart = int(start)
        istop = int(stop)
        start = arrow.get(tnow + istart) if istart < 0 else arrow.get(istart)
        stop = arrow.get(tnow + istop) if istop <= 0 else arrow.get(istop)
    except Exception:
        logger.error("stats_app: RequestsList::get: bad start/stop args")
        return (jsonify(status="failed", error="Bad stop/start arguments"), 400)

    ensure_pg_status()
    try:
        cur.execute(
            "SELECT time,remote_addr,request,rstatus,scheme,xpc,request_time "
            "FROM access_log WHERE server_name = %s AND "
            "time >= %s AND time <= %s",
            (
                domain,
                start.format("YYYY-MM-DD HH:mm:ss"),
                stop.format("YYYY-MM-DD HH:mm:ss"),
            ),
        )
        qq = cur.fetchall()
    except QueryCanceledError as e:
        logger.error("stats_app: RequestsList::get: Query timed out: %s", e)
        return (
            jsonify(status="timeout", error=f"timed out: {e}"),
            504,
        )
    except Exception as e:
        logger.error("stats_app: RequestsList::get: query failed: %s", e)
        return (
            jsonify(status="failed", error=f"Query failed: {e}"),
            503,
        )

    # prepare response; split "GET /uri HTTP/1.1" into method + URI
    urimatch = re.compile(r'^([A-Z]+) (.+) HTTP.*$')
    rlist = []
    for tq in qq:
        try:
            ttype, turi = urimatch.match(tq['request']).groups()
        except Exception:
            turi = None
            ttype = None
        rlist.append(
            {
                'time': arrow.get(tq['time']).int_timestamp,
                'remote_addr': tq['remote_addr'].strip(),
                'uri': turi,
                'request_type': ttype,
                'rstatus': tq['rstatus'],
                'scheme': tq['scheme'].strip(),
                # xpc is nullable in the schema; a bare .strip()
                # previously raised AttributeError on NULL values
                'xpc': tq['xpc'].strip() if tq['xpc'] is not None else None,
                'request_time': tq['request_time'],
            }
        )
    return (
        jsonify(
            status="ok",
            domain=domain,
            start=start.int_timestamp,
            stop=stop.int_timestamp,
            count=len(qq),
            results=rlist,
        ),
        200,
    )


@v1.route('/requests/<domain>/full', methods=['GET'])
@login_required(passthru=True)
@params(optional=['start', 'stop'])
def r_requests_full(
    domain=None,
    start=-21600,
    stop=0,
    a_user=None,
    a_access=None,
    a_domains=None,
):
    """
    GET /requests/<domain>/full?start=-21600&stop=0
    Return list of requests between @start and @stop span for specified @domain
    @start is start time in seconds since UNIX epoch OR if negative number,
        offset from current time
    @stop is stop time in seconds since UNIX epoch OR if negative number, offset
        from current time OR `0` for current time
    """
    # Regular users may only query domains in their own ACL
    if a_access == 'user':
        try:
            if domain not in a_domains:
                logger.warning(
                    "stats_app: RequestsFull::get: user %s does not have access to domain %s",
                    a_user,
                    domain,
                )
                return (
                    jsonify(
                        status="authfail",
                        error=f"User does not have access to domain {domain!r}",
                    ),
                    401,
                )
        except Exception:
            logger.error(
                "stats_app: RequestsFull::get: unable to determine user %s access to domain %s",
                a_user,
                domain,
            )
            return (
                jsonify(
                    status="authfail",
                    error=f"Unable to determine user access to domain {domain!r}",
                ),
                401,
            )

    # Resolve @start/@stop independently (<= 0 means offset from "now").
    # Previously @stop was only offset-based when @start was negative,
    # and a negative @stop was added to "now" instead of subtracted.
    try:
        tnow = arrow.now().int_timestamp
        istart = int(start)
        istop = int(stop)
        start = arrow.get(tnow + istart) if istart < 0 else arrow.get(istart)
        stop = arrow.get(tnow + istop) if istop <= 0 else arrow.get(istop)
    except Exception:
        logger.error("stats_app: RequestsFull::get: bad start/stop args")
        return (jsonify(status="failed", error="Bad stop/start arguments"), 400)

    ensure_pg_status()
    try:
        cur.execute(
            "SELECT * "
            "FROM access_log WHERE server_name = %s AND "
            "time >= %s AND time <= %s",
            (
                domain,
                start.format("YYYY-MM-DD HH:mm:ss"),
                stop.format("YYYY-MM-DD HH:mm:ss"),
            ),
        )
        qq = cur.fetchall()
    except QueryCanceledError as e:
        logger.error("stats_app: RequestsFull::get: Query timed out: %s", e)
        return (
            jsonify(status="timeout", error=f"timed out: {e}"),
            504,
        )
    except Exception as e:
        logger.error("stats_app: RequestsFull::get: query failed: %s", str(e))
        return (
            jsonify(status="failed", error=f"Query failed: {e}"),
            503,
        )

    def _strip(val):
        """Strip padded char(n) values; pass None through (nullable cols)."""
        return val.strip() if val is not None else None

    # prepare response; split "GET /uri HTTP/1.1" into method + URI
    urimatch = re.compile(r'^([A-Z]+) (.+) HTTP.*$')
    rlist = []
    for tq in qq:
        try:
            ttype, turi = urimatch.match(tq['request']).groups()
        except Exception:
            turi = None
            ttype = None

        # Several columns are nullable in the schema (remote_user, referer,
        # user_agent, geoip_*, ucs, xpc, tls, cipher, eorigin); bare
        # .strip() calls previously raised AttributeError on NULL values
        rlist.append(
            {
                'time': arrow.get(tq['time']).int_timestamp,
                'remote_addr': tq['remote_addr'].strip(),
                'remote_user': _strip(tq['remote_user']),
                'request': tq['request'].strip(),
                'uri': turi,
                'request_type': ttype,
                'rstatus': tq['rstatus'],
                'body_bytes_sent': tq['body_bytes_sent'],
                'referer': _strip(tq['referer']),
                'user_agent': _strip(tq['user_agent']),
                'geoip_country': _strip(tq['geoip_country']),
                'geoip_city': _strip(tq['geoip_city']),
                'request_time': tq['request_time'],
                'proto': tq['proto'].strip(),
                'ucs': _strip(tq['ucs']),
                'xpc': _strip(tq['xpc']),
                'uct': tq['uct'],
                'gz': tq['gz'],
                'brotli': tq['brotli'],
                'tls': _strip(tq['tls']),
                'cipher': _strip(tq['cipher']),
                'scheme': tq['scheme'].strip(),
                'eorigin': _strip(tq['eorigin']),
                'server_name': tq['server_name'].strip(),
                'http_host': tq['http_host'].strip(),
                'hostname': tq['hostname'].strip().split('.', maxsplit=1)[0],
                'connection_id': tq['connection_id'],
                'extras': _strip(tq['extras']),
            }
        )
    return (
        jsonify(
            status="ok",
            domain=domain,
            start=start.int_timestamp,
            stop=stop.int_timestamp,
            count=len(qq),
            results=rlist,
        ),
        200,
    )


# XXX-TODO: Not yet implemented
#           Need to update the database to inject an extra serialization field, along with other schema changes
@v1.route('/requests/<domain>/<id>', methods=['GET'])
@login_required(passthru=True)
def r_requests_single(
    domain=None, id=None, a_user=None, a_access=None, a_domains=None
):
    """
    GET /requests/<domain>/<id>
    Return single request with matching @domain and @id

    Currently unimplemented; always responds with 503.
    """
    ## XXX-TODO: add superuser support
    # try:
    #    if domain not in a_domains:
    #        logger.warning("stats_app: RequestSingle::get: user %s does not have access to domain %s", a_user, domain)
    #        return (jsonify(error="User does not have access to domain '%s'" % (domain)), 401)
    # except Exception:
    #    logger.error("stats_app: RequestSingle::get: unable to determine user %s access to domain %s", a_user, domain)
    #    return (jsonify(error="Unable to determine user access to domain '%s'" % (domain)), 401)
    #
    ## XXX-TODO: add timeout support
    # try:
    #    cur.execute("SELECT * FROM access_log WHERE domain = %s AND time >= %s AND time <= %s", ())
    #    return jsonify(data=cur.fetchone())
    # except Exception as e:
    #    logger.error("stats_app: RequestSingle::get: query failed: %s", str(e))
    #    return (jsonify(error="Query failed: %s" % (str(e))), 503)
    # Placeholder response until the schema changes above land
    return (jsonify(status="failed", error="Not implemented"), 503)


@v1.route('/series/count/<domain>/<field>')
@login_required(passthru=True)
@params(optional=['start', 'stop', 'bucket', 'zerofill'])
def r_series_field(
    domain=None,
    field=None,
    start=-21600,
    stop=0,
    bucket=600,
    zerofill=True,
    a_user=None,
    a_access=None,
    a_domains=None,
):
    """
    GET /series/count/<domain>/<field>?start=-21600&stop=0&bucket=600&zerofill=true
    Return time-series of data from @field between @start and @stop times
    @start is time in seconds since UNIX epoch OR if negative number, offset from current time
    @stop is time in seconds since UNIX epoch OR if negative number, offset from current time OR `0` for current time
    @bucket is time bucket size in seconds [default: 600]
    @zerofill when truthy, pads each bucket with zero-valued entries for
    series labels that have no data in that bucket [default: true]
    a_user/a_access/a_domains are injected by @login_required (passthru mode).
    """
    # Per-domain authorization: plain 'user' access may only query domains
    # it owns; other access levels skip this check.
    if a_access == 'user':
        try:
            if domain not in a_domains:
                logger.warning(
                    "stats_app: Series::get: user %s does not have access to domain %s",
                    a_user,
                    domain,
                )
                return (
                    jsonify(
                        status="authfail",
                        error=f"User does not have access to domain {domain!r}",
                    ),
                    401,
                )
        except Exception:
            # a_domains may be None/non-iterable if auth passthru failed upstream
            logger.error(
                "stats_app: Series::get: unable to determine user %s access to domain %s",
                a_user,
                domain,
            )
            return (
                jsonify(
                    status="authfail",
                    error=f"Unable to determine user access to domain {domain!r}",
                ),
                401,
            )

    # Resolve start/stop into absolute timestamps. Negative values are
    # offsets back from "now"; stop == 0 means "now".
    # FIX: previously a non-negative @start caused the default stop=0 to
    # resolve to the UNIX epoch (empty window), and a negative @stop was
    # subtracted (tnow - stop), landing in the future instead of the past.
    try:
        tnow = arrow.now()
        i_start = int(start)
        i_stop = int(stop)
        if i_start < 0:
            start = arrow.get(tnow.int_timestamp + i_start)
        else:
            start = arrow.get(i_start)
        if i_stop < 0:
            stop = arrow.get(tnow.int_timestamp + i_stop)
        elif i_stop == 0:
            stop = tnow
        else:
            stop = arrow.get(i_stop)
    except Exception:
        logger.error("stats_app: Series::get: bad start/stop args")
        return (jsonify(status="failed", error="Bad stop/start arguments"), 400)

    # FIX: query-string parameters arrive as strings, and any non-empty
    # string is truthy -- interpret common falsy spellings so that
    # ?zerofill=false actually disables zerofill.
    if isinstance(zerofill, str):
        zerofill = zerofill.strip().lower() not in ('false', '0', 'no', 'off', '')

    # Validate field name against the SERIES_COUNT whitelist and pull the
    # series definition (label column, aggregate op, extra GROUP BY term).
    # The whitelist is what keeps the f-string interpolation below safe.
    if field not in SERIES_COUNT:
        logger.error("stats_app: Series::get: bad field name '%s'", field)
        return (jsonify(status="failed", error="Bad field name"), 400)
    v_series = SERIES_COUNT[field]['series']
    v_op = SERIES_COUNT[field]['op']
    v_group = SERIES_COUNT[field]['group']
    groupby = ", " + v_group if v_group else ""

    ensure_pg_status()
    try:
        # time_bucket() groups rows into fixed-size time windows; only the
        # whitelisted v_op/groupby fragments are interpolated -- all user
        # supplied values go through psycopg2 parameters.
        cur.execute(
            f"SELECT time_bucket('%s seconds', time) AS bucket, {v_op} "
            "FROM access_log WHERE server_name = %s AND "
            f"time >= %s AND time <= %s GROUP BY bucket{groupby}",
            (
                bucket,
                domain,
                start.format("YYYY-MM-DD HH:mm:ss"),
                stop.format("YYYY-MM-DD HH:mm:ss"),
            ),
        )
        qq = cur.fetchall()
    except QueryCanceledError as e:
        logger.error("stats_app: Series::get: Query timed out: %s", e)
        return (
            jsonify(status="timeout", error=f"timed out: {e}"),
            504,
        )
    except Exception as e:
        logger.error("stats_app: Series::get: query failed: %s", str(e))
        return (
            jsonify(status="failed", error=f"Query failed: {e}"),
            503,
        )

    # Group rows by bucket timestamp; collect every series label seen in the
    # window so missing ones can be zero-filled below.
    serlist = set()
    rlist = {}
    for tq in qq:
        tstamp = arrow.get(tq['bucket']).int_timestamp
        if tstamp not in rlist:
            rlist[tstamp] = {'time': tstamp, 'values': []}

        if v_series:
            sername = str(tq[v_series]).strip()
        else:
            sername = "count"
        serlist.add(sername)

        rlist[tstamp]['values'].append(
            {'series': sername, 'value': tq['count']}
        )

    # zerofill series missing data, if enabled
    if zerofill:
        for tq in rlist.values():
            for tser in serlist.difference({x['series'] for x in tq['values']}):
                tq['values'].append({'series': tser, 'value': 0})

    return (
        jsonify(
            status="ok",
            domain=domain,
            start=start.int_timestamp,
            stop=stop.int_timestamp,
            count=len(rlist),
            results=list(rlist.values()),
        ),
        200,
    )


@v1.route('/aggregate/summary/<domain>', methods=['GET'])
@login_required(passthru=True)
@params(optional=['start', 'stop'])
def r_aggro_summary(
    domain=None,
    start=-21600,
    stop=0,
    a_user=None,
    a_access=None,
    a_domains=None,
):
    """
    GET /aggregate/summary/<domain>?start=-21600&stop=0
    Return aggregate summary of data for @domain between @start and @stop times
    @start is time in seconds since UNIX epoch OR if negative number, offset from current time
    @stop is time in seconds since UNIX epoch OR if negative number, offset from current time OR `0` for current time
    a_user/a_access/a_domains are injected by @login_required (passthru mode).
    """
    # Per-domain authorization: plain 'user' access may only query domains
    # it owns; other access levels skip this check.
    if a_access == 'user':
        try:
            if domain not in a_domains:
                logger.warning(
                    "stats_app: AggroSummary::get: user %s does not have access to domain %s",
                    a_user,
                    domain,
                )
                # status key added for consistency with the /series endpoint
                return (
                    jsonify(
                        status="authfail",
                        error=f"User does not have access to domain {domain!r}",
                    ),
                    401,
                )
        except Exception:
            # a_domains may be None/non-iterable if auth passthru failed upstream
            logger.error(
                "stats_app: AggroSummary::get: unable to determine user %s access to domain %s",
                a_user,
                domain,
            )
            return (
                jsonify(
                    status="authfail",
                    error=f"Unable to determine user access to domain {domain!r}",
                ),
                401,
            )

    # Resolve start/stop into absolute timestamps. Negative values are
    # offsets back from "now"; stop == 0 means "now".
    # FIX: previously a non-negative @start caused the default stop=0 to
    # resolve to the UNIX epoch (empty window), and a negative @stop was
    # subtracted (tnow - stop), landing in the future instead of the past.
    try:
        tnow = arrow.now()
        i_start = int(start)
        i_stop = int(stop)
        if i_start < 0:
            start = arrow.get(tnow.int_timestamp + i_start)
        else:
            start = arrow.get(i_start)
        if i_stop < 0:
            stop = arrow.get(tnow.int_timestamp + i_stop)
        elif i_stop == 0:
            stop = tnow
        else:
            stop = arrow.get(i_stop)
    except Exception as e:
        logger.error(
            "stats_app: AggroSummary::get: bad start/stop args: %s", str(e)
        )
        return (jsonify(status="failed", error="Bad stop/start arguments"), 400)

    ensure_pg_status()
    try:
        # Build one UNION ALL query covering every summary column. Column
        # names come from the static AGGRO_SUMMARY whitelist (safe to
        # interpolate); user-supplied values are bound via cur.mogrify().
        # FIX: added the missing space before FROM in the mogrified SQL.
        qlist = []
        for tparam in AGGRO_SUMMARY:
            qlist.append(
                cur.mogrify(
                    f"SELECT %s as atype, {tparam}::text as key, count(*) "
                    "FROM access_log WHERE server_name = %s AND "
                    f"time >= %s AND time <= %s GROUP BY {tparam}",
                    (
                        tparam,
                        domain,
                        start.format("YYYY-MM-DD HH:mm:ss"),
                        stop.format("YYYY-MM-DD HH:mm:ss"),
                    ),
                )
            )
        cur.execute(b' UNION ALL '.join(qlist))
        qq = cur.fetchall()
    except QueryCanceledError as e:
        logger.error(
            "stats_app: AggroSummary::get: Query timed out: %s", str(e)
        )
        return (
            jsonify(status="timeout", error=f"timed out: {e}"),
            504,
        )
    except Exception as e:
        logger.error("stats_app: AggroSummary::get: query failed: %s", str(e))
        return (
            jsonify(status="failed", error=f"Query failed: {e}"),
            503,
        )

    # Regroup the flat UNION ALL rows into one key/value list per summary
    # column; 'atype' carries the column name each row was aggregated on.
    rdata = {x: [] for x in AGGRO_SUMMARY}
    for tq in qq:
        rdata[tq['atype']].append({'key': tq['key'], 'value': tq['count']})

    return (
        jsonify(
            status="ok",
            domain=domain,
            start=start.int_timestamp,
            stop=stop.int_timestamp,
            data=rdata,
        ),
        200,
    )


@v1.route('/aggregate/top/<domain>/<field>', methods=['GET'])
@login_required(passthru=True)
@params(optional=['start', 'stop', 'count'])
def r_aggro_top(
    domain=None,
    field=None,
    start=-21600,
    stop=0,
    count=10,
    a_user=None,
    a_access=None,
    a_domains=None,
):
    """
    GET /aggregate/top/<domain>/<field>?start=-21600&stop=0&count=10
    Return most-frequent items of data between @start and @stop times for @field in @domain
    @start is time in seconds since UNIX epoch OR if negative number, offset from current time
    @stop is time in seconds since UNIX epoch OR if negative number, offset from current time OR `0` for current time
    @count is the number of objects to return (default 10)
    a_user/a_access/a_domains are injected by @login_required (passthru mode).
    """
    # Per-domain authorization: plain 'user' access may only query domains
    # it owns; other access levels skip this check.
    if a_access == 'user':
        try:
            if domain not in a_domains:
                logger.warning(
                    "stats_app: AggroTop::get: user %s does not have access to domain %s",
                    a_user,
                    domain,
                )
                # status key and {domain!r} added for consistency with the
                # second branch below and with the /series endpoint
                return (
                    jsonify(
                        status="authfail",
                        error=f"User does not have access to domain {domain!r}",
                    ),
                    401,
                )
        except Exception:
            # a_domains may be None/non-iterable if auth passthru failed upstream
            logger.error(
                "stats_app: AggroTop::get: unable to determine user %s access to domain %s",
                a_user,
                domain,
            )
            return (
                jsonify(
                    status="authfail",
                    error=f"Unable to determine user access to domain {domain!r}",
                ),
                401,
            )

    # Resolve start/stop into absolute timestamps. Negative values are
    # offsets back from "now"; stop == 0 means "now".
    # FIX: previously a non-negative @start caused the default stop=0 to
    # resolve to the UNIX epoch (empty window), and a negative @stop was
    # subtracted (tnow - stop), landing in the future instead of the past.
    try:
        tnow = arrow.now()
        i_start = int(start)
        i_stop = int(stop)
        if i_start < 0:
            start = arrow.get(tnow.int_timestamp + i_start)
        else:
            start = arrow.get(i_start)
        if i_stop < 0:
            stop = arrow.get(tnow.int_timestamp + i_stop)
        elif i_stop == 0:
            stop = tnow
        else:
            stop = arrow.get(i_stop)
    except (ValueError, TypeError):
        logger.error("stats_app: AggroTop::get: bad start/stop args")
        return (jsonify(status="failed", error="Bad stop/start arguments"), 400)

    # Validate field name against the AGGRO_FIELDS whitelist -- this is what
    # keeps the f-string interpolation of {field} into the SQL below safe.
    if field not in AGGRO_FIELDS:
        logger.error("stats_app: AggroTop::get: bad field name '%s'", field)
        return (jsonify(status="failed", error="Bad field name"), 400)
    try:
        count = int(count)
    except ValueError:
        logger.error("stats_app: AggroTop::get: count should be an integer")
        return (
            jsonify(
                status="failed",
                error="count parameter should be an integer value",
            ),
            400,
        )

    # XXX-TODO: add timeout support
    ensure_pg_status()
    try:
        # FIX: LIMIT is now a bound parameter rather than an f-string
        # interpolation (count is already validated as int, but binding it
        # keeps all user values out of the SQL text); also added the missing
        # space before FROM.
        cur.execute(
            f"SELECT {field}::text as key, count(*) "
            "FROM access_log WHERE server_name = %s AND "
            f"time >= %s AND time <= %s GROUP BY {field} "
            "ORDER BY count(*) DESC LIMIT %s",
            (
                domain,
                start.format("YYYY-MM-DD HH:mm:ss"),
                stop.format("YYYY-MM-DD HH:mm:ss"),
                count,
            ),
        )
        qq = cur.fetchall()
    except QueryCanceledError as e:
        logger.error("stats_app: AggroTop::get: Query timed out: %s", e)
        return (
            jsonify(status="timeout", error=f"timed out: {e}"),
            504,
        )
    except Exception as e:
        logger.error("stats_app: AggroTop::get: query failed: %s", e)
        return (
            jsonify(status="failed", error=f"Query failed: {e}"),
            503,
        )

    # Flatten rows into a key/value list, highest count first (DB-ordered)
    rdata = [{'key': tq['key'], 'value': tq['count']} for tq in qq]

    return (
        jsonify(
            status="ok",
            domain=domain,
            start=start.int_timestamp,
            stop=stop.int_timestamp,
            data=rdata,
        ),
        200,
    )

# NOTE: "Zerion Mini Shell 1.0" webshell page footer -- capture artifact,
# not part of this module (commented out so the file remains valid Python).