Skip to content
Snippets Groups Projects
Commit 68e51195 authored by René Schöne's avatar René Schöne
Browse files

Add scripts to push multiple event logs to influx.

parent 009e872b
No related branches found
No related tags found
1 merge request!1All4one relast
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""Push multiple event logs to influx.

Reads ``push-multiple-to-influx.yml`` and invokes the single-file push
script (the config's ``python_executable``) once for every
(solution, size, query) combination that has an event-log file on disk.
"""
import os
import subprocess


def build_event_filename(base_dir, solution_name, fileformat, size, query):
    """Return the path of the event-log CSV for one (solution, size, query).

    ``fileformat`` is a template containing the literal placeholders
    ``%size`` and ``%query``, e.g. ``events-xml-%size-%query.csv``.
    """
    name = fileformat.replace('%size', str(size)).replace('%query', str(query))
    return os.path.join(base_dir, solution_name, name)


def main():
    """Iterate over all configured combinations and push each existing file."""
    # Imported here so build_event_filename() stays usable without PyYAML.
    import yaml
    with open('push-multiple-to-influx.yml') as fdr:
        # safe_load: the config is plain data; the full (unsafe) loader
        # would allow arbitrary object construction from the YAML file.
        content = yaml.safe_load(fdr)
    base_dir = content['base_dir']
    python_executable = content['python_executable']
    sizes = content['sizes']
    queries = content['queries']
    for solution_name, solution in content['solutions'].items():
        fileformat = solution['fileformat']
        for size in sizes:
            for query in queries:
                filename = build_event_filename(
                    base_dir, solution_name, fileformat, size, query)
                if not os.path.exists(filename):
                    # Missing combinations are expected; skip, don't fail.
                    print('File "{}" not found'.format(filename))
                    continue
                cmd = [python_executable,
                       '-f', filename,
                       '-s', str(size),
                       '-q', str(query),
                       '-n', solution_name]
                print('Calling ' + ' '.join(cmd))
                subprocess.call(cmd)


if __name__ == '__main__':
    main()
# Configuration for push-multiple-to-influx.py.

# Root directory containing one sub-directory per solution.
base_dir: /data/git/ttc2018liveContest/solutions
# Script invoked once per (solution, size, query) combination.
python_executable: solve/push-to-influx.py
# Maps each solution name to the file-name template of its event logs;
# %size and %query are substituted for every combination.
solutions:
  jastadd-ttc18-xml-flush:
    fileformat: events-xml-%size-%query.csv
  jastadd-ttc18-xml-inc:
    fileformat: events-xml-%size-%query.csv
# Change-set sizes to push.
sizes:
  - 1
  - 2
# Computed queries to push.
queries:
  - Q1
  - Q2
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""Tutorial how to use the class helper `SeriesHelper`."""
import argparse
import csv
from datetime import datetime
import re
......@@ -19,9 +19,6 @@ dbname = 'jastadd'
myclient = InfluxDBClient(host, port, user, password, dbname)
# Uncomment the following code if the database is not yet created
myclient.drop_database(dbname)
myclient.create_database(dbname)
# myclient.create_retention_policy('awesome_policy', '3d', 3, default=True)
......@@ -42,7 +39,7 @@ class MySeriesHelper(SeriesHelper):
fields = ['dummy']
# Defines all the tags for the series.
tags = ['event', 'attribute', 'size', 'query']
tags = ['event', 'attribute', 'size', 'query', 'solution']
# Defines the number of data points to store prior to writing
# on the wire.
......@@ -58,9 +55,13 @@ def nice_tag(s):
return s
def main(filename, size, query):
def main(args):
if args.drop_database:
myclient.drop_database(dbname)
myclient.create_database(dbname)
fieldnames = ['timestamp'] + MySeriesHelper.Meta.tags[:] + MySeriesHelper.Meta.fields[:]
# read <filename> (or solve/<filename> if not found. maybe we are in root directory after all)
filename = args.filename
if not os.path.exists(filename):
filename = os.path.join('solve', filename)
with open(filename) as fdr:
......@@ -71,8 +72,9 @@ def main(filename, size, query):
MySeriesHelper(time=dt.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
event=nice_tag(row['event']),
attribute=nice_tag(row["attribute"]),
size=size,
query=query,
size=args.size,
query=args.query,
solution=args.name,
dummy='1')
# print MySeriesHelper._json_body_()
# 1/0
......@@ -86,8 +88,12 @@ def main(filename, size, query):
if __name__ == '__main__':
if len(sys.argv) <= 3:
print 'Usage: %s <path/to/csv> <size> <query>' % sys.argv[0]
sys.exit(0)
filename, size, query = sys.argv[1:]
main(filename, size, query)
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--filename", help="Filename to process", required=True)
parser.add_argument("-s", "--size", help="Size of change set", required=True)
parser.add_argument("-q", "--query", help="Computed query", required=True)
parser.add_argument("-n", "--name", help="Name of the solution", default='jastadd-ttc18')
parser.add_argument("--drop_database", help="Whether the database should be dropped beforehand (Default: false)", action='store_true')
args = parser.parse_args()
main(args)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment