@crypticmind
Last active May 2, 2016 03:00

Revisions

  1. crypticmind revised this gist May 2, 2016. 3 changed files with 62 additions and 7 deletions.
    34 changes: 34 additions & 0 deletions logging.conf
    @@ -0,0 +1,34 @@
    [loggers]
    keys=root,reporting

    [handlers]
    keys=consoleHandler,fileHandler

    [formatters]
    keys=simpleFormatter

    [logger_root]
    level=DEBUG
    handlers=fileHandler

    [logger_reporting]
    level=DEBUG
    handlers=fileHandler
    qualname=reporting
    propagate=0

    [handler_consoleHandler]
    class=StreamHandler
    level=DEBUG
    formatter=simpleFormatter
    args=(sys.stdout,)

    [handler_fileHandler]
    class=FileHandler
    level=DEBUG
    formatter=simpleFormatter
    args=("reporting.log",)

    [formatter_simpleFormatter]
    format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
    datefmt=
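
    For reference, this is the configuration that reporting.py loads below via logging.config.fileConfig.
    A minimal sketch of how a script consumes it (standard library only; the logger name matches the
    qualname=reporting entry above):

        import logging
        import logging.config

        # load the [loggers]/[handlers]/[formatters] sections defined in logging.conf
        logging.config.fileConfig('logging.conf')

        # 'reporting' matches qualname=reporting; propagate=0 means records go to
        # fileHandler only, i.e. they are appended to reporting.log
        logger = logging.getLogger('reporting')
        logger.info("this line ends up in reporting.log")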
    16 changes: 14 additions & 2 deletions reporting.py
    @@ -6,12 +6,19 @@
    import httplib
    import urllib2
    import json
    import logging
    import logging.config

    logging.config.fileConfig('logging.conf')

    logger = logging.getLogger(__name__)

    mysql_bin = '/opt/local/lib/mysql56/bin/mysql'


    def run_query(dbconf, query):
        """Run a SQL query and return a list of lists of values."""
        logger.info("Running query %s" % (query,))
        proc_output = subprocess.check_output([
            mysql_bin,
            '--defaults-extra-file=%s' % dbconf,
    @@ -21,6 +28,7 @@ def run_query(dbconf, query):
            '-e',
            query
        ]).split("\n")[:-1]
        logger.info("Query returned %d rows" % (len(proc_output),))
        return map(lambda line: line.split("\t"), proc_output)


    @@ -37,8 +45,10 @@ def get(self, key):
    c.execute("SELECT value FROM cache WHERE name = ? AND datetime(created, ttl || ' seconds') > datetime('now')", (str(key),))
    row = c.fetchone()
    if row:
    logger.debug("Cache hit: Key %s in cache %s" % (key, self.name))
    return pickle.loads(row[0])
    else:
    logger.debug("Cache miss: Key %s in cache %s" % (key, self.name))
    return None

    def get_or_else(self, key, f, ttl_secs):
    @@ -57,17 +67,19 @@ def put(self, key, value, ttl_secs):
    c = self.dbconn.cursor()
    c.execute("INSERT OR REPLACE INTO cache (name, value, created, ttl) VALUES (?, ?, datetime('now'), ?)", (key, pickle.dumps(value), ttl_secs))
    self.dbconn.commit()
    logger.debug("Stored key %s in cache %s (for %d secs)" % (key, self.name, ttl_secs))

    def delete(self, key):
    c = self.dbconn.cursor()
    c.execute("DELETE FROM cache WHERE name = ?", (str(key),))
    logger.debug("Removed key %s from cache %s" % (key, self.name))

    def http_get_json(conn, path):
        print("Fetch http://%s:%d%s" % (conn.host, conn.port, path))
        logger.info("Fetching http://%s:%d%s" % (conn.host, conn.port, path))
        conn.request('GET', path)
        json_data = conn.getresponse().read()
        return json.loads(json_data)

    def http_get_json_simple(url):
        print("Fetch %s" % url)
        logger.info("Fetching %s" % url)
        return json.loads(urllib2.urlopen(url).read())
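
    As a usage sketch (not part of the gist itself), the Cache and http_get_json_simple helpers above
    combine like this; the user id and TTL are illustrative, and the endpoint is the same
    jsonplaceholder.typicode.com service the reports query:

        from reporting import Cache, http_get_json_simple

        user_info_cache = Cache("user_info")  # persisted in user_info.db (sqlite3)

        def fetch_user_name():
            # this network call only happens on a cache miss
            return http_get_json_simple('http://jsonplaceholder.typicode.com/users/1')['name']

        # the first call fetches and stores the value; repeat calls within 300s read it from sqlite
        name = user_info_cache.get_or_else('1', fetch_user_name, 300)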
    19 changes: 14 additions & 5 deletions test_report_1.py
    @@ -12,12 +12,13 @@
    from yaptu import Template
    import cgi
    import pygal
    import logging.config

    data = run_query("db.cnf", '''
    SELECT a, b
    FROM test_data;
    ''')
    logging.config.fileConfig('logging.conf')

    logger = logging.getLogger("test_report_1")

    logger.info("Initialized user information cache")
    user_info_cache = Cache("user_info")

    conn = httplib.HTTPConnection('jsonplaceholder.typicode.com')
    @@ -30,11 +31,19 @@ def fetch_user_name():
    def escape_data(row):
        return [cgi.escape(value) for value in row]

    report_data = map(add_user_name, data)
    logger.info("Querying sample data from database")
    report_data = run_query("db.cnf", "SELECT a, b FROM test_data")

    logger.info("Adding user details")
    report_data = map(add_user_name, report_data)

    logger.debug("Escaping HTML characters")
    report_data = map(escape_data, report_data)

    logger.info("Running sample graph")
    config = pygal.Config()
    config.disable_xml_declaration = True
    chart = pygal.Bar(config).add('1', [1, 3, 3, 7]).add('2', [1, 6, 6, 4]).render().encode('utf-8')

    logger.info("Generating report output")
    Template(open('test_report_1.template').read()).render(globals())
  2. crypticmind revised this gist May 1, 2016. 1 changed file with 11 additions and 0 deletions.
    11 changes: 11 additions & 0 deletions test_report_1.py
    @@ -1,10 +1,17 @@
    #!/usr/bin/env python

    # Clone https://github.com/Kozea/pygal
    # Run python setup.py build
    # Add lib path here
    import sys
    sys.path.append('/Users/cryptic/dev/pygal/build/lib')

    from reporting import run_query, Cache, http_get_json, http_get_json_simple
    import httplib
    import json
    from yaptu import Template
    import cgi
    import pygal

    data = run_query("db.cnf", '''
    SELECT a, b
    @@ -26,4 +33,8 @@ def escape_data(row):
    report_data = map(add_user_name, data)
    report_data = map(escape_data, report_data)

    config = pygal.Config()
    config.disable_xml_declaration = True
    chart = pygal.Bar(config).add('1', [1, 3, 3, 7]).add('2', [1, 6, 6, 4]).render().encode('utf-8')

    Template(open('test_report_1.template').read()).render(globals())
  3. crypticmind revised this gist Apr 30, 2016. 2 changed files with 36 additions and 18 deletions.
    23 changes: 5 additions & 18 deletions test_report.py → test_report_1.py
    @@ -4,6 +4,7 @@
    import httplib
    import json
    from yaptu import Template
    import cgi

    data = run_query("db.cnf", '''
    SELECT a, b
    @@ -19,24 +20,10 @@ def fetch_user_name():
            return http_get_json(conn, '/users/%s' % str(row[1]))['name']
        return row + [user_info_cache.get_or_else(row[1], fetch_user_name, 300)]

    def escape_pipes(row):
        return [value.replace('|', '\|') for value in row]
    def escape_data(row):
        return [cgi.escape(value) for value in row]

    report_data = map(add_user_name, data)
    report_data = map(escape_pipes, report_data)
    report_data = map(escape_data, report_data)

    Template('''
    # Sample Report
    A Table
    Column A | Column B (User ID) | Name
    ---------|--------------------|-----
    {{ for row in report_data:
    @{row[0]} | @{row[1]} | @{row[2]}
    }}
    End of report.
    ''').render(globals())
    Template(open('test_report_1.template').read()).render(globals())
    31 changes: 31 additions & 0 deletions test_report_1.template
    @@ -0,0 +1,31 @@
    <html>
    <head>
    <title>Sample Report</title>
    </head>
    <body>

    <h1>Sample Report</h1>

    A Table

    <table>
    <thead>
    <tr>
    <td>Column A</td>
    <td>Column B (User ID)</td>
    <td>Name</td>
    </tr>
    </thead>
    {{ for row in report_data:
    <tr>
    <td>@{row[0]}</td>
    <td>@{row[1]}</td>
    <td>@{row[2]}</td>
    </tr>
    }}
    </table>

    End of report.

    </body>
    </html>
  4. crypticmind revised this gist Apr 30, 2016. 2 changed files with 160 additions and 5 deletions.
    26 changes: 21 additions & 5 deletions report.py → test_report.py
    @@ -3,6 +3,7 @@
    from reporting import run_query, Cache, http_get_json, http_get_json_simple
    import httplib
    import json
    from yaptu import Template

    data = run_query("db.cnf", '''
    SELECT a, b
    @@ -16,11 +17,26 @@
    def add_user_name(row):
        def fetch_user_name():
            return http_get_json(conn, '/users/%s' % str(row[1]))['name']
        return row + [user_info_cache.get_or_else(row[1], fetch_user_name, 5)]
        return row + [user_info_cache.get_or_else(row[1], fetch_user_name, 300)]

    report = map(add_user_name, data)
    def escape_pipes(row):
        return [value.replace('|', '\|') for value in row]

    def output(row):
        print(row)
    report_data = map(add_user_name, data)
    report_data = map(escape_pipes, report_data)

    map(output, report)
    Template('''
    # Sample Report
    A Table
    Column A | Column B (User ID) | Name
    ---------|--------------------|-----
    {{ for row in report_data:
    @{row[0]} | @{row[1]} | @{row[2]}
    }}
    End of report.
    ''').render(globals())
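
    The @{...} and {{ ... }} markers above are interpreted by the yaptu recipe added below: @{expr}
    is replaced by the evaluated expression, a line starting with {{ opens an embedded statement whose
    body is the following lines, and a }} line closes it. A small sketch with made-up sample rows
    (output goes to stdout):

        from yaptu import Template

        rows = [['1', '10', 'Leanne Graham'], ['2', '20', 'Ervin Howell']]  # hypothetical data
        Template('''Column A | Column B | Name
    {{ for row in report_data:
    @{row[0]} | @{row[1]} | @{row[2]}
    }}
    End of report.''').render({'report_data': rows})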
    139 changes: 139 additions & 0 deletions yaptu.py
    @@ -0,0 +1,139 @@
    # Yet Another Python Templating Utility, Version 1.2
    # Taken from http://code.activestate.com/recipes/52305/

    import sys

    # utility stuff to avoid tests in the mainline code
    class _nevermatch:
        "Polymorphic with a regex that never matches"
        def match(self, line):
            return None
    _never = _nevermatch() # one reusable instance of it suffices
    def identity(string, why):
        "A do-nothing-special-to-the-input, just-return-it function"
        return string
    def nohandle(string):
        "A do-nothing handler that just re-raises the exception"
        raise

    # and now the real thing
    class copier:
        "Smart-copier (YAPTU) class"
        def copyblock(self, i=0, last=None):
            "Main copy method: process lines [i,last) of block"
            def repl(match, self=self):
                "return the eval of a found expression, for replacement"
                # uncomment for debug: print '!!! replacing',match.group(1)
                expr = self.preproc(match.group(1), 'eval')
                try: return str(eval(expr, self.globals, self.locals))
                except: return str(self.handle(expr))
            block = self.locals['_bl']
            if last is None: last = len(block)
            while i<last:
                line = block[i]
                match = self.restat.match(line)
                if match: # a statement starts "here" (at line block[i])
                    # i is the last line to _not_ process
                    stat = match.string[match.end(0):].strip()
                    j=i+1 # look for 'finish' from here onwards
                    nest=1 # count nesting levels of statements
                    while j<last:
                        line = block[j]
                        # first look for nested statements or 'finish' lines
                        if self.restend.match(line): # found a statement-end
                            nest = nest - 1 # update (decrease) nesting
                            if nest==0: break # j is first line to _not_ process
                        elif self.restat.match(line): # found a nested statement
                            nest = nest + 1 # update (increase) nesting
                        elif nest==1: # look for continuation only at this nesting
                            match = self.recont.match(line)
                            if match: # found a contin.-statement
                                nestat = match.string[match.end(0):].strip()
                                stat = '%s _cb(%s,%s)\n%s' % (stat,i+1,j,nestat)
                                i=j # again, i is the last line to _not_ process
                        j=j+1
                    stat = self.preproc(stat, 'exec')
                    stat = '%s _cb(%s,%s)' % (stat,i+1,j)
                    # for debugging, uncomment...: print "-> Executing: {"+stat+"}"
                    exec stat in self.globals,self.locals
                    i=j+1
                else: # normal line, just copy with substitution
                    self.ouf.write(self.regex.sub(repl,line))
                    i=i+1
        def __init__(self, regex=_never, dict={},
                restat=_never, restend=_never, recont=_never,
                preproc=identity, handle=nohandle, ouf=sys.stdout):
            "Initialize self's attributes"
            self.regex = regex
            self.globals = dict
            self.locals = { '_cb':self.copyblock }
            self.restat = restat
            self.restend = restend
            self.recont = recont
            self.preproc = preproc
            self.handle = handle
            self.ouf = ouf
        def copy(self, block=None, inf=sys.stdin):
            "Entry point: copy-with-processing a file, or a block of lines"
            if block is None: block = inf.readlines()
            self.locals['_bl'] = block
            self.copyblock()

    class Template:
        def __init__(self, template):
            import re
            rex=re.compile('@{([^}]+)}')
            rbe=re.compile('{{')
            ren=re.compile('}}')
            rco=re.compile('_ ')
            self.copier = copier(rex, {}, rbe, ren, rco)
            self.template = [line + '\n' for line in template.split('\n')]
        def render(self, dict):
            self.copier.globals = dict
            self.copier.copy(self.template)

    if __name__=='__main__':
        t = Template('''
    A first, plain line -- it just gets copied.
    A second line, with @{x} substitutions.
    {{ x+=1 # non-block statements MUST end with comments
    }}
    Now the substitutions are @{x}.
    {{ if x>23:
    After all, @{x} is rather large!
    _ else:
    After all, @{x} is rather small!
    }}
    {{ for i in range(3):
    Also, @{i} times @{x} is @{i*x}.
    }}
    One last, plain line at the end.''')
        t.render({ 'x': 23 })

    # "Test: copy a block of lines, with full processing"
    # import re
    # rex=re.compile('@{([^}]+)}')
    # rbe=re.compile('{{')
    # ren=re.compile('}}')
    # rco=re.compile('_ ')
    # x=23 # just a variable to try substitution
    # cop = copier(rex, globals(), rbe, ren, rco)
    # lines_block = [line+'\n' for line in '''
    # A first, plain line -- it just gets copied.
    # A second line, with @{x} substitutions.
    # {{ x+=1 # non-block statements MUST end with comments
    # }}
    # Now the substitutions are @{x}.
    # {{ if x>23:
    # After all, @{x} is rather large!
    # _ else:
    # After all, @{x} is rather small!
    # }}
    # {{ for i in range(3):
    # Also, @{i} times @{x} is @{i*x}.
    # }}
    # One last, plain line at the end.'''.split('\n')]
    # print "*** input:"
    # print ''.join(lines_block)
    # print "*** output:"
    # cop.copy(lines_block)
  5. crypticmind revised this gist Apr 30, 2016. 4 changed files with 90 additions and 76 deletions.
    6 changes: 2 additions & 4 deletions db.cnf
    @@ -1,7 +1,5 @@
    [client]
    host=localhost
    database=test_report
    user=test_report
    password=test_report
    database=test_report
    raw
    batch
    skip-column-names
    34 changes: 15 additions & 19 deletions report.py
    @@ -1,30 +1,26 @@
    #!/usr/bin/env python

    import subprocess

    #
    # /opt/local/lib/mysql56/bin/mysql --defaults-extra-file=db.cnf -e "$Q" | sed 's/|/\\|/g;s/ /|/g' >> $1
    #


    def run_query(dbconf, query):
        proc_output = subprocess.check_output([
            '/opt/local/lib/mysql56/bin/mysql',
            '--defaults-extra-file=%s' % dbconf,
            '-e',
            query
        ]).split("\n")[:-1]
        return map(lambda line: line.split("\t"), proc_output)

    from reporting import run_query, Cache, http_get_json, http_get_json_simple
    import httplib
    import json

    data = run_query("db.cnf", '''
    SELECT a, b
    FROM test_data;
    ''')

    user_info_cache = Cache("user_info")

    conn = httplib.HTTPConnection('jsonplaceholder.typicode.com')

    def add_user_name(row):
        def fetch_user_name():
            return http_get_json(conn, '/users/%s' % str(row[1]))['name']
        return row + [user_info_cache.get_or_else(row[1], fetch_user_name, 5)]

    def f(line):
        print(line)
    report = map(add_user_name, data)

    map(f, data)
    def output(row):
        print(row)

    map(output, report)
    53 changes: 0 additions & 53 deletions report.sh
    @@ -1,53 +0,0 @@
    #!/bin/bash

    USER=test_report
    DB=test_report
    PASSWORD=test_report

    # $1 = output file
    function append_query_as_markdown {
    IFS='' read -r -d '' Q
    /opt/local/lib/mysql56/bin/mysql --defaults-extra-file=db.cnf -e "$Q" | sed 's/|/\\|/g;s/ /|/g' >> $1
    }

    # $1 = output file
    function append_query_as_csv {
    IFS='' read -r -d '' Q
    /opt/local/lib/mysql56/bin/mysql --defaults-extra-file=db.cnf -e "$Q" | sed 's/^/"/;s/$/"/;s/ /","/g' >> $1
    }


    REPORT1=test_report.md

    cat > $REPORT1 <<REPORT
    # Test Report 1
    A table.
    Column A | Column B
    ---------|---------
    REPORT

    append_query_as_markdown $REPORT1 <<SQL
    SELECT a, b
    FROM test_data;
    SQL

    cat >> $REPORT1 <<REPORT
    End of report.
    REPORT

    REPORT2=test_report.csv

    cat > $REPORT2 <<REPORT
    "Column A", "Column B"
    REPORT

    append_query_as_csv $REPORT2 <<SQL
    SELECT a, b
    FROM test_data;
    SQL

    73 changes: 73 additions & 0 deletions reporting.py
    @@ -0,0 +1,73 @@

    import subprocess
    import urllib2
    import sqlite3
    import pickle
    import httplib
    import urllib2
    import json

    mysql_bin = '/opt/local/lib/mysql56/bin/mysql'


    def run_query(dbconf, query):
        """Run a SQL query and return a list of lists of values."""
        proc_output = subprocess.check_output([
            mysql_bin,
            '--defaults-extra-file=%s' % dbconf,
            '--raw',
            '--batch',
            '--skip-column-names',
            '-e',
            query
        ]).split("\n")[:-1]
        return map(lambda line: line.split("\t"), proc_output)


    class Cache:
        def __init__(self, name):
            self.name = name
            self.dbconn = sqlite3.connect(name + ".db")
            c = self.dbconn.cursor()
            c.execute("CREATE TABLE IF NOT EXISTS cache (name TEXT, value BLOB, created TEXT, ttl INTEGER, PRIMARY KEY(name))")
            self.dbconn.commit()

        def get(self, key):
            c = self.dbconn.cursor()
            c.execute("SELECT value FROM cache WHERE name = ? AND datetime(created, ttl || ' seconds') > datetime('now')", (str(key),))
            row = c.fetchone()
            if row:
                return pickle.loads(row[0])
            else:
                return None

        def get_or_else(self, key, f, ttl_secs):
            value_from_cache = self.get(key)
            if value_from_cache:
                return value_from_cache
            else:
                try:
                    value_from_f = f()
                    self.put(key, value_from_f, ttl_secs)
                    return value_from_f
                except:
                    raise

        def put(self, key, value, ttl_secs):
            c = self.dbconn.cursor()
            c.execute("INSERT OR REPLACE INTO cache (name, value, created, ttl) VALUES (?, ?, datetime('now'), ?)", (key, pickle.dumps(value), ttl_secs))
            self.dbconn.commit()

        def delete(self, key):
            c = self.dbconn.cursor()
            c.execute("DELETE FROM cache WHERE name = ?", (str(key),))

    def http_get_json(conn, path):
        print("Fetch http://%s:%d%s" % (conn.host, conn.port, path))
        conn.request('GET', path)
        json_data = conn.getresponse().read()
        return json.loads(json_data)

    def http_get_json_simple(url):
        print("Fetch %s" % url)
        return json.loads(urllib2.urlopen(url).read())
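
    Because db.cnf enables raw, batch, and skip-column-names, the mysql client prints one
    tab-separated line per result row, so run_query returns a list of lists of strings. A
    hypothetical call (the table contents are invented for illustration):

        from reporting import run_query

        rows = run_query("db.cnf", "SELECT a, b FROM test_data")
        # e.g. [['1', '10'], ['2', '20']] -- every value comes back as a string
        for a, b in rows:
            print("a=%s, b=%s" % (a, b))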
  6. crypticmind revised this gist Apr 29, 2016. 1 changed file with 30 additions and 0 deletions.
    30 changes: 30 additions & 0 deletions report.py
    @@ -0,0 +1,30 @@
    #!/usr/bin/env python

    import subprocess

    #
    # /opt/local/lib/mysql56/bin/mysql --defaults-extra-file=db.cnf -e "$Q" | sed 's/|/\\|/g;s/ /|/g' >> $1
    #


    def run_query(dbconf, query):
        proc_output = subprocess.check_output([
            '/opt/local/lib/mysql56/bin/mysql',
            '--defaults-extra-file=%s' % dbconf,
            '-e',
            query
        ]).split("\n")[:-1]
        return map(lambda line: line.split("\t"), proc_output)


    data = run_query("db.cnf", '''
    SELECT a, b
    FROM test_data;
    ''')


    def f(line):
        print(line)

    map(f, data)

  7. crypticmind created this gist Apr 28, 2016.
    7 changes: 7 additions & 0 deletions db.cnf
    @@ -0,0 +1,7 @@
    [client]
    user=test_report
    password=test_report
    database=test_report
    raw
    batch
    skip-column-names
    53 changes: 53 additions & 0 deletions report.sh
    @@ -0,0 +1,53 @@
    #!/bin/bash

    USER=test_report
    DB=test_report
    PASSWORD=test_report

    # $1 = output file
    function append_query_as_markdown {
    IFS='' read -r -d '' Q
    /opt/local/lib/mysql56/bin/mysql --defaults-extra-file=db.cnf -e "$Q" | sed 's/|/\\|/g;s/ /|/g' >> $1
    }

    # $1 = output file
    function append_query_as_csv {
    IFS='' read -r -d '' Q
    /opt/local/lib/mysql56/bin/mysql --defaults-extra-file=db.cnf -e "$Q" | sed 's/^/"/;s/$/"/;s/ /","/g' >> $1
    }


    REPORT1=test_report.md

    cat > $REPORT1 <<REPORT
    # Test Report 1
    A table.
    Column A | Column B
    ---------|---------
    REPORT

    append_query_as_markdown $REPORT1 <<SQL
    SELECT a, b
    FROM test_data;
    SQL

    cat >> $REPORT1 <<REPORT
    End of report.
    REPORT

    REPORT2=test_report.csv

    cat > $REPORT2 <<REPORT
    "Column A", "Column B"
    REPORT

    append_query_as_csv $REPORT2 <<SQL
    SELECT a, b
    FROM test_data;
    SQL