Merge pull request #2560 from deniszh/backport/1.1.x/pr-2547_pr-2547_pr-2547_pr-2547_pr-2547_pr-2555_pr-2555_pr-2554_pr-2554_pr-2556_pr-2557_pr-2557_pr-2558_pr-2559

[1.1.x] handle exceptions if params cannot be type converted (#2547) | fix minor bug in query param evaluation (#2547) | fails to instantiate find query must be due to bad user input (#2547) | add tests for invalid parameter types (#2547) |
deniszh authored Mar 10, 2020
2 parents ebfe642 + 75fa4dc commit f511380
Showing 23 changed files with 349 additions and 227 deletions.
28 changes: 18 additions & 10 deletions contrib/demo-collector.py
@@ -2,33 +2,39 @@
from commands import getstatusoutput
from platform import node
from socket import socket, AF_INET, SOCK_STREAM
from sys import argv, exit
from sys import argv
from time import sleep, time

DELAY = 60
CARBON_SERVER = 'localhost'
CARBON_PORT = 2003


class Carbon:
def __init__(self, hostname, port):
self.s = socket(AF_INET, SOCK_STREAM)
self.hostname = hostname
self.port = int(port)
self.connect()

def connect(self):
try:
self.s.connect((self.hostname, self.port))
except IOError, e:
except IOError as e:
print("connect: ", e)
return
def disconnect(self): self.s.close()

def disconnect(self):
self.s.close()

def send(self, data):
try:
self.s.sendall(data + "\n")
except:
except Exception:
self.connect()
self.s.sendall(data + "\n")


class Host:
def __init__(self):
self.historical = {}
@@ -53,9 +59,11 @@ def delta_analyzer(self, measurements, data, now):
for line in data:
for measurement, loc in measurements.iteritems():
metric_name = "%s.%s" % (line[0], measurement)
try: value = line[loc]
except: continue
if self.historical.has_key(metric_name):
try:
value = line[loc]
except Exception:
continue
if metric_name in self.historical:
current = value
delta = int(value) - int(self.historical[metric_name][1])
timedelta = time() - self.historical[metric_name][0]
@@ -147,7 +155,7 @@ def fetch_smb_statistics(self):
for i, block in enumerate(raw_data.split("\n\n")):
if i not in measurements.keys(): continue
raw_data = block.split("\n")
if this_node is not None:
if this_node is not None:
this_node_count = [line.startswith(this_node + ":") for line in raw_data].count(True)
else:
this_node_count = len(raw_data) - 4
@@ -160,7 +168,7 @@ def main():
host = Host()
hostname = node().split('.')[0]

graphite = Carbon(CARBON_SERVER, CARBON_PORT);
graphite = Carbon(CARBON_SERVER, CARBON_PORT)

while True:
data = host.get_all()
@@ -170,6 +178,6 @@ def main():
graphite.send(metric)
sleep(DELAY)


if __name__ == '__main__':
main()
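For context, a minimal usage sketch of the Carbon class above (not part of this diff). It assumes Carbon's plaintext protocol, where each line is "<metric path> <value> <unix timestamp>" sent to port 2003; the metric name below is made up.

    # Python-2-style sketch, matching the script above; the metric name is illustrative.
    from time import time

    graphite = Carbon(CARBON_SERVER, CARBON_PORT)           # 'localhost', 2003 as defined above
    graphite.send("demo.host1.load 0.42 %d" % int(time()))  # send() appends the trailing newline
    graphite.disconnect()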

30 changes: 22 additions & 8 deletions contrib/memcache_whisper.py
@@ -30,13 +30,22 @@
For details on the modification, read https://bugs.launchpad.net/graphite/+bug/245835
"""

import os, struct, time
import os
import struct
import sys
import time
try:
import fcntl
CAN_LOCK = True
except ImportError:
CAN_LOCK = False

if sys.version_info[0] == 3:
xrange = range
# This `file` hack avoids linter warnings under Python-3,
# but this code likely only works with Python-2.
file = None

LOCK = False
CACHE_HEADERS = False
__headerCache = {}
@@ -58,12 +67,15 @@

debug = startBlock = endBlock = lambda *a,**k: None


def exists(path):
return os.path.exists(path)


def drop(path):
os.remove(path)


def enableMemcache(servers = ['127.0.0.1:11211'], min_compress_len = 0):
from StringIO import StringIO
import memcache
@@ -84,15 +96,17 @@ def close(self):
if self.mode == "r+b" or self.mode == "wb":
MC.set(self.name, self.getvalue(), min_compress_len = min_compress_len)
StringIO.close(self)

def exists(path):
return MC.get(path) != None
return MC.get(path) is not None

def drop(path):
MC.delete(path)


def enableDebug():
global open, debug, startBlock, endBlock

class open(file):
def __init__(self,*args,**kwargs):
file.__init__(self,*args,**kwargs)
@@ -180,13 +194,13 @@ def create(path,archiveList,xFilesFactor=0.5):
if i == len(archiveList) - 1: break
next = archiveList[i+1]
assert archive[0] < next[0],\
"You cannot configure two archives with the same precision %s,%s" % (archive,next)
"You cannot configure two archives with the same precision %s,%s" % (archive,next)
assert (next[0] % archive[0]) == 0,\
"Higher precision archives' precision must evenly divide all lower precision archives' precision %s,%s" % (archive[0],next[0])
"Higher precision archives' precision must evenly divide all lower precision archives' precision %s,%s" % (archive[0],next[0])
retention = archive[0] * archive[1]
nextRetention = next[0] * next[1]
assert nextRetention > retention,\
"Lower precision archives must cover larger time intervals than higher precision archives %s,%s" % (archive,next)
"Lower precision archives must cover larger time intervals than higher precision archives %s,%s" % (archive,next)
#Looks good, now we create the file and write the header
assert not exists(path), "File %s already exists!" % path
fh = open(path,'wb')
@@ -211,7 +225,7 @@ def create(path,archiveList,xFilesFactor=0.5):

def __propagate(fh,timestamp,xff,higher,lower):
lowerIntervalStart = timestamp - (timestamp % lower['secondsPerPoint'])
lowerIntervalEnd = lowerIntervalStart + lower['secondsPerPoint']
# lowerIntervalEnd = lowerIntervalStart + lower['secondsPerPoint']
fh.seek(higher['offset'])
packedPoint = fh.read(pointSize)
(higherBaseInterval,higherBaseValue) = struct.unpack(pointFormat,packedPoint)
@@ -301,7 +315,7 @@ def update(path,value,timestamp=None):
if baseInterval == 0: #This file's first update
fh.seek(archive['offset'])
fh.write(myPackedPoint)
baseInterval,baseValue = myInterval,value
baseInterval,baseValue = myInterval,value # noqa: F841
else: #Not our first update
timeDistance = myInterval - baseInterval
pointDistance = timeDistance / archive['secondsPerPoint']
4 changes: 2 additions & 2 deletions contrib/test_aggregator_rules.py
@@ -8,12 +8,12 @@
LIB_DIR = join(ROOT_DIR, 'graphite', 'lib')
sys.path.insert(0, LIB_DIR)

from carbon.aggregator.rules import RuleManager
from carbon.aggregator.rules import RuleManager # noqa: E402

### Basic usage
if len(sys.argv) != 3:
print("Usage: %s 'aggregator rule' 'line item'" % (__file__))
print("\nSample invocation: %s %s %s" % \
print("\nSample invocation: %s %s %s" %
(__file__, "'<prefix>.<env>.<key>.sum.all (10) = sum <prefix>.<env>.<<key>>.sum.<node>'", 'stats.prod.js.ktime_sum.sum.host2' ))
sys.exit(42)

10 changes: 1 addition & 9 deletions setup.cfg
@@ -18,18 +18,14 @@ provides = graphite
obsoletes = graphite <= 0.9.9

[flake8]
exclude = .tox,contrib
exclude = .tox
ignore =
# E111 indentation is not a multiple of four
E111,
# E114 indentation is not a multiple of four (comment)
E114,
# E121 continuation line under-indented for hanging indent
E121,
# E122 continuation line missing indentation or outdented
E122,
# E124 closing bracket does not match visual indentation
E124,
# E126 continuation line over-indented for hanging indent
E126,
# E128 continuation line under-indented for visual indent
@@ -68,9 +64,5 @@ ignore =
W504,
# E701 multiple statements on one line (colon)
E701,
# E713 test for membership should be 'not in'
E713,
# E731 do not assign a lambda expression, use a def
E731,
# F841 local variable 'stuff' is assigned to but never used
F841,
14 changes: 14 additions & 0 deletions webapp/graphite/errors.py
@@ -1,7 +1,21 @@
from django.http import HttpResponseBadRequest


class NormalizeEmptyResultError(Exception):
# throw error for normalize() when empty
pass


class InputParameterError(ValueError):
pass


# decorator which turns InputParameterExceptions into Django's HttpResponseBadRequest
def handleInputParameterError(f):
def new_f(*args, **kwargs):
try:
return f(*args, **kwargs)
except InputParameterError as e:
return HttpResponseBadRequest('Bad Request: {err}'.format(err=e))

return new_f
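The new handleInputParameterError decorator turns an InputParameterError raised anywhere inside a view into an HTTP 400 response instead of a server error. A hypothetical sketch of how a view could use it (the view name and parameter handling below are illustrative only, not part of this change):

    # Hypothetical usage sketch -- the view and its parameter handling are illustrative.
    from django.http import HttpResponse
    from graphite.errors import InputParameterError, handleInputParameterError

    @handleInputParameterError
    def my_view(request):
        raw = request.GET.get('maxDataPoints', '0')
        try:
            max_points = int(raw)
        except ValueError:
            # type conversion failed: surface it as a 400 Bad Request, not a 500
            raise InputParameterError('maxDataPoints must be an integer, got %r' % raw)
        return HttpResponse('ok, maxDataPoints=%d' % max_points)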
31 changes: 17 additions & 14 deletions webapp/graphite/functions/params.py
@@ -179,15 +179,28 @@ def toJSON(self):
jsonVal['suggestions'] = self.suggestions
return jsonVal

def validateValue(self, value):
def validateValue(self, value, func):
# if value isn't specified and there's a default then the default will be used,
# we don't need to validate the default value because we trust that it is valid
if value is None and self.default is not None:
value = self.default
return True

# None is ok for optional params
if not self.required and value is None:
return True

return self.type.isValid(value)
# parameter is restricted to a defined set of values, but value is not in it
if self.options and value not in self.options:
raise InputParameterError(
'Invalid option specified for function "{func}" parameter "{param}": {value}'.format(
func=func, param=self.name, value=repr(value)))

if not self.type.isValid(value):
raise InputParameterError(
'Invalid "{type}" value specified for function "{func}" parameter "{param}": {value}'.format(
type=self.type.name, func=func, param=self.name, value=repr(value)))

return True


def validateParams(func, params, args, kwargs):
@@ -218,17 +231,7 @@ def validateParams(func, params, args, kwargs):
# requirement is satisfied from "args"
value = args[i]

# parameter is restricted to a defined set of values, but value is not in it
if params[i].options and value not in params[i].options:
raise InputParameterError(
'Invalid option specified for function "{func}" parameter "{param}": {value}'.format(
func=func, param=params[i].name, value=repr(value)))

if not params[i].validateValue(value):
raise InputParameterError(
'Invalid "{type}" value specified for function "{func}" parameter "{param}": {value}'.format(
type=params[i].type.name, func=func, param=params[i].name, value=repr(value)))

params[i].validateValue(value, func)
valid_args.append(params[i].name)

return True
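In short, validateValue now raises InputParameterError itself, naming the function, parameter and offending value, instead of returning False and leaving validateParams to build the message; the options check moves into it as well. A standalone sketch of that contract (the names below are stand-ins, not the real Param API):

    # Standalone sketch of the reworked validation contract -- not the real Param class.
    class InputParameterError(ValueError):
        pass

    def validate_value(func, param, value, required=False, default=None,
                       options=None, is_valid=lambda v: True, type_name='string'):
        if value is None and default is not None:
            return True   # the default will be used; it is trusted to be valid
        if not required and value is None:
            return True   # a missing optional parameter is fine
        if options and value not in options:
            raise InputParameterError(
                'Invalid option specified for function "%s" parameter "%s": %r'
                % (func, param, value))
        if not is_valid(value):
            raise InputParameterError(
                'Invalid "%s" value specified for function "%s" parameter "%s": %r'
                % (type_name, func, param, value))
        return True

    # e.g. validate_value('exampleFunc', 'mode', 'avgerage', options=['avg', 'sum'])
    # raises: Invalid option specified for function "exampleFunc" parameter "mode": 'avgerage'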
15 changes: 2 additions & 13 deletions webapp/graphite/logger.py
@@ -15,19 +15,8 @@
import os
import logging
from logging.handlers import TimedRotatingFileHandler as Rotater
try:
from logging import NullHandler
except ImportError as ie: # py2.6
from logging import Handler

class NullHandler(Handler):

def emit(self, record):
pass
try:
from logging import FileHandler, StreamHandler
except ImportError as ie: # py2.6
from logging.handlers import FileHandler, StreamHandler
from logging import NullHandler, FileHandler, StreamHandler

from django.conf import settings

