From 6bb42eca8972982888871dba509898a88cfe8033 Mon Sep 17 00:00:00 2001 From: Denys Zhdanov Date: Sun, 11 Apr 2021 19:08:01 +0200 Subject: [PATCH 1/4] Backport changes for 1.1.8 --- bin/run-graphite-devel-server.py | 15 +- check-dependencies.py | 146 +++-- contrib/test_aggregator_rules.py | 17 +- docs/conf.py | 22 +- docs/config-local-settings.rst | 6 + docs/config-webapp.rst | 77 ++- docs/index.rst | 1 + docs/install-pip.rst | 4 +- docs/install-synthesize.rst | 11 +- docs/tools.rst | 4 + docs/whisper.rst | 2 +- examples/example-client.py | 58 +- requirements.txt | 4 +- setup.cfg | 21 +- setup.py | 16 +- tox.ini | 6 +- webapp/content/js/composer_widgets.js | 12 +- webapp/content/js/dashboard.js | 10 +- webapp/graphite/account/admin.py | 4 +- webapp/graphite/account/ldapBackend.py | 86 +-- webapp/graphite/account/models.py | 40 +- webapp/graphite/account/views.py | 58 +- webapp/graphite/browser/views.py | 29 +- webapp/graphite/carbonlink.py | 338 +++++----- webapp/graphite/composer/views.py | 86 +-- webapp/graphite/dashboard/models.py | 64 +- webapp/graphite/errors.py | 100 ++- webapp/graphite/events/views.py | 8 +- webapp/graphite/finders/__init__.py | 26 +- webapp/graphite/finders/standard.py | 62 +- webapp/graphite/functions/__init__.py | 121 ++-- webapp/graphite/functions/aggfuncs.py | 10 +- webapp/graphite/functions/params.py | 376 ++++++----- webapp/graphite/functions/safe.py | 131 ++-- webapp/graphite/functions/views.py | 86 +-- webapp/graphite/intervals.py | 204 +++--- webapp/graphite/local_settings.py.example | 9 +- webapp/graphite/logger.py | 138 ++-- webapp/graphite/metrics/views.py | 7 +- webapp/graphite/node.py | 52 +- webapp/graphite/readers/remote.py | 161 +++-- webapp/graphite/render/attime.py | 304 ++++----- webapp/graphite/render/datalib.py | 24 + webapp/graphite/render/evaluator.py | 59 +- webapp/graphite/render/functions.py | 216 ++++++- webapp/graphite/render/glyph.py | 16 +- webapp/graphite/render/grammar.py | 154 +---- webapp/graphite/render/hashing.py | 226 +++---- webapp/graphite/render/views.py | 10 +- webapp/graphite/settings.py | 110 ++-- webapp/graphite/storage.py | 192 +++--- webapp/graphite/tags/base.py | 559 ++++++++-------- webapp/graphite/tags/http.py | 280 ++++---- webapp/graphite/tags/localdatabase.py | 602 +++++++++--------- webapp/graphite/tags/models.py | 26 +- webapp/graphite/tags/redis.py | 496 +++++++-------- webapp/graphite/tags/utils.py | 280 ++++---- webapp/graphite/tags/views.py | 196 +++--- webapp/graphite/url_shortener/baseconv.py | 2 +- webapp/graphite/user_util.py | 28 +- webapp/graphite/util.py | 9 +- webapp/graphite/version/views.py | 8 +- webapp/graphite/views.py | 6 +- webapp/graphite/whitelist/views.py | 57 +- webapp/graphite/worker_pool/pool.py | 175 +++-- webapp/tests/base.py | 4 +- webapp/tests/funcplugins/plugin.py | 8 +- webapp/tests/funcplugins/plugin_bad_param.py | 8 +- .../tests/funcplugins/plugin_bad_paramtype.py | 4 +- webapp/tests/funcplugins/plugin_no_group.py | 4 +- webapp/tests/funcplugins/plugin_no_params.py | 4 +- .../funcplugins/plugin_string_paramtype.py | 4 +- webapp/tests/test_attime.py | 16 +- webapp/tests/test_finders.py | 32 +- webapp/tests/test_functions.py | 259 ++++++-- webapp/tests/test_metrics.py | 33 +- webapp/tests/test_params.py | 238 ++++++- webapp/tests/test_readers_ceres.py | 4 +- webapp/tests/test_readers_multi.py | 4 +- webapp/tests/test_readers_remote.py | 8 +- webapp/tests/test_readers_whisper.py | 16 +- webapp/tests/test_render.py | 101 +-- webapp/tests/test_util.py | 55 +- 
webapp/tests/test_whitelist.py | 2 +- webapp/tests/test_worker_pool.py | 12 +- 85 files changed, 4043 insertions(+), 3436 deletions(-) diff --git a/bin/run-graphite-devel-server.py b/bin/run-graphite-devel-server.py index deb26eb49..6ef3397cb 100755 --- a/bin/run-graphite-devel-server.py +++ b/bin/run-graphite-devel-server.py @@ -13,21 +13,22 @@ option_parser.add_option('--interface', default='0.0.0.0', action='store', help='Interface to listen on') option_parser.add_option('--libs', default=None, help='Path to the directory containing the graphite python package') option_parser.add_option('--noreload', action='store_true', help='Disable monitoring for changes') +option_parser.add_option('--settings', default='graphite.settings', help='configure alternative settings module') (options, args) = option_parser.parse_args() if not args: - option_parser.print_usage() - sys.exit(1) + option_parser.print_usage() + sys.exit(1) graphite_root = args[0] python_path = os.path.join(graphite_root, 'webapp') if options.libs: - libdir = os.path.expanduser(options.libs) - print('Adding %s to your PYTHONPATH' % libdir) - os.environ['PYTHONPATH'] = libdir + ':' + os.environ.get('PYTHONPATH','') + libdir = os.path.expanduser(options.libs) + print('Adding %s to your PYTHONPATH' % libdir) + os.environ['PYTHONPATH'] = libdir + ':' + os.environ.get('PYTHONPATH','') print("Running Graphite from %s under django development server\n" % graphite_root) @@ -35,12 +36,12 @@ 'django-admin.py', 'runserver', '--pythonpath', python_path, - '--settings', 'graphite.settings', + '--settings', options.settings, '%s:%d' % (options.interface, options.port) ] if options.noreload: - command.append('--noreload') + command.append('--noreload') print(' '.join(command)) diff --git a/check-dependencies.py b/check-dependencies.py index f89c4ae6b..181461a66 100755 --- a/check-dependencies.py +++ b/check-dependencies.py @@ -3,9 +3,9 @@ import sys if sys.version_info <= (2,7): - # SystemExit defaults to returning 1 when printing a string to stderr - raise SystemExit("You are using python %s, but version 2.7 or greater is " - "required" % sys.version_info) + # SystemExit defaults to returning 1 when printing a string to stderr + raise SystemExit("You are using python %s, but version 2.7 or greater is " + "required" % sys.version_info) required = 0 optional = 0 @@ -13,121 +13,139 @@ # Test for whisper try: - import whisper + import whisper except ImportError: - # No? test for ceres - try: - import ceres - # We imported ceres, but not whisper so it's an optional dependency - sys.stderr.write("[OPTIONAL] Unable to import the 'whisper' module. Without it the webapp will be unable to read .wsp files\n") - optional += 1 - except ImportError: - sys.stderr.write("[REQUIRED] Unable to import the 'whisper' or 'ceres' modules, please download this package from the Graphite project page and install it.\n") - required += 1 + # No? test for ceres + try: + import ceres + # We imported ceres, but not whisper so it's an optional dependency + sys.stderr.write("[OPTIONAL] Unable to import the 'whisper' module. 
" + "Without it the webapp will be unable to read .wsp files\n") + optional += 1 + except ImportError: + sys.stderr.write("[REQUIRED] Unable to import the 'whisper' or 'ceres' modules, " + "please download this package from the Graphite project page and install it.\n") + required += 1 # Test for cairocffi or pycairo try: - import cairocffi as cairo + import cairocffi as cairo except ImportError: - sys.stderr.write("[REQUIRED] Unable to import the 'cairocffi' module, attempting to fall back to pycairo\n") - try: - import cairo - except ImportError: - sys.stderr.write("[REQUIRED] Unable to import the 'cairo' module, do you have pycairo installed for python %s?\n" % sys.version_info.major) - cairo = None - required += 1 + sys.stderr.write("[REQUIRED] Unable to import the 'cairocffi' module, attempting to fall back to pycairo\n") + try: + import cairo + except ImportError: + sys.stderr.write("[REQUIRED] Unable to import the 'cairo' module, " + "do you have pycairo installed for python %s?\n" % sys.version_info.major) + cairo = None + required += 1 # Test that pycairo has the PNG backend try: - if cairo: - surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10) - del surface + if cairo: + surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10) + del surface except Exception: - sys.stderr.write("[REQUIRED] Failed to create an ImageSurface with cairo, you probably need to recompile cairo with PNG support\n") - required += 1 + sys.stderr.write("[REQUIRED] Failed to create an ImageSurface with cairo, " + "you probably need to recompile cairo with PNG support\n") + required += 1 # Test that cairo can find fonts try: - if cairo: - surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10) - context = cairo.Context(surface) - context.font_extents() - del surface, context + if cairo: + surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10) + context = cairo.Context(surface) + context.font_extents() + del surface, context except Exception: - sys.stderr.write("[REQUIRED] Failed to create text with cairo, this probably means cairo cant find any fonts. Install some system fonts and try again\n") + sys.stderr.write("[REQUIRED] Failed to create text with cairo, " + "this probably means cairo cant find any fonts. 
" + "Install some system fonts and try again\n") # Test for django try: - import django + import django except ImportError: - sys.stderr.write("[REQUIRED] Unable to import the 'django' module, do you have Django installed for python %s?\n" % sys.version_info.major) - django = None - required += 1 + sys.stderr.write("[REQUIRED] Unable to import the 'django' module, " + "do you have Django installed for python %s?\n" % sys.version_info.major) + django = None + required += 1 # Test for pytz try: - import pytz + import pytz except ImportError: - sys.stderr.write("[REQUIRED] Unable to import the 'pytz' module, do you have pytz module installed for python %s?\n" % sys.version_info.major) - required += 1 + sys.stderr.write("[REQUIRED] Unable to import the 'pytz' module, " + "do you have pytz module installed for python %s?\n" % sys.version_info.major) + required += 1 # Test for pyparsing try: - import pyparsing + import pyparsing except ImportError: - sys.stderr.write("[REQUIRED] Unable to import the 'pyparsing' module, do you have pyparsing module installed for python %s?\n" % sys.version_info.major) - required += 1 + sys.stderr.write("[REQUIRED] Unable to import the 'pyparsing' module, " + "do you have pyparsing module installed for python %s?\n" % sys.version_info.major) + required += 1 # Test for django-tagging try: - import tagging + import tagging except ImportError: - sys.stderr.write("[REQUIRED] Unable to import the 'tagging' module, do you have django-tagging installed for python %s?\n" % sys.version_info.major) - required += 1 + sys.stderr.write("[REQUIRED] Unable to import the 'tagging' module, " + "do you have django-tagging installed for python %s?\n" % sys.version_info.major) + required += 1 if django and django.VERSION[:2] < (1,8): - sys.stderr.write("[REQUIRED] You have django version %s installed, but version 1.8 or greater is required\n" % django.get_version()) - required += 1 + sys.stderr.write("[REQUIRED] You have django version %s installed, " + "but version 1.8 or greater is required\n" % django.get_version()) + required += 1 # Test for python-memcached try: - import memcache + import memcache except ImportError: - sys.stderr.write("[OPTIONAL] Unable to import the 'memcache' module, do you have python-memcached installed for python %s? This feature is not required but greatly improves performance.\n" % sys.version_info.major) - optional += 1 + sys.stderr.write("[OPTIONAL] Unable to import the 'memcache' module, " + "do you have python-memcached installed for python %s? " + "This feature is not required but greatly improves performance.\n" % sys.version_info.major) + optional += 1 # Test for python-ldap try: - import ldap + import ldap except ImportError: - sys.stderr.write("[OPTIONAL] Unable to import the 'ldap' module, do you have python-ldap installed for python %s? Without python-ldap, you will not be able to use LDAP authentication in the graphite webapp.\n" % sys.version_info.major) - optional += 1 + sys.stderr.write("[OPTIONAL] Unable to import the 'ldap' module, " + "do you have python-ldap installed for python %s? " + "Without python-ldap, you will not be able to use " + "LDAP authentication in the graphite webapp.\n" % sys.version_info.major) + optional += 1 # Test for txamqp try: - import txamqp + import txamqp except ImportError: - sys.stderr.write("[OPTIONAL] Unable to import the 'txamqp' module, this is required if you want to use AMQP as an input to Carbon. 
Note that txamqp requires python 2.5 or greater.\n") - optional += 1 + sys.stderr.write("[OPTIONAL] Unable to import the 'txamqp' module, " + "this is required if you want to use AMQP as an input to Carbon. " + "Note that txamqp requires python 2.5 or greater.\n") + optional += 1 # Test for python-rrdtool try: - import rrdtool + import rrdtool except ImportError: - sys.stderr.write("[OPTIONAL] Unable to import the 'python-rrdtool' module, this is required for reading RRD.\n") - optional += 1 + sys.stderr.write("[OPTIONAL] Unable to import the 'python-rrdtool' module, this is required for reading RRD.\n") + optional += 1 # Test for whitenoise @@ -147,15 +165,15 @@ if optional: - sys.stderr.write("%d optional dependencies not met. Please consider the optional items before proceeding.\n" % optional) + sys.stderr.write("%d optional dependencies not met. Please consider the optional items before proceeding.\n" % optional) else: - print("All optional dependencies are met.") + print("All optional dependencies are met.") if required: - sys.stderr.write("%d necessary dependencies not met. Graphite will not function until these dependencies are fulfilled.\n" % required) - sys.exit(1) + sys.stderr.write("%d necessary dependencies not met. Graphite will not function until these dependencies are fulfilled.\n" % required) + sys.exit(1) else: - print("All necessary dependencies are met.") + print("All necessary dependencies are met.") # suppress unused-import warnings diff --git a/contrib/test_aggregator_rules.py b/contrib/test_aggregator_rules.py index e93c3ea66..62dfe0383 100644 --- a/contrib/test_aggregator_rules.py +++ b/contrib/test_aggregator_rules.py @@ -12,10 +12,13 @@ ### Basic usage if len(sys.argv) != 3: - print("Usage: %s 'aggregator rule' 'line item'" % (__file__)) - print("\nSample invocation: %s %s %s" % - (__file__, "'...sum.all (10) = sum ..<>.sum.'", 'stats.prod.js.ktime_sum.sum.host2' )) - sys.exit(42) + print("Usage: %s 'aggregator rule' 'line item'" % (__file__)) + print("\nSample invocation: %s %s %s" % ( + __file__, + "'...sum.all (10) = sum ..<>.sum.'", + 'stats.prod.js.ktime_sum.sum.host2' + )) + sys.exit(42) ### cli arguments me, raw_rule, raw_metric = sys.argv @@ -36,8 +39,8 @@ print("Raw metric: %s" % raw_metric) if match: - print("Match dict: %s" % match.groupdict()) - print("Result: %s" % rule.output_template % match.groupdict()) + print("Match dict: %s" % match.groupdict()) + print("Result: %s" % rule.output_template % match.groupdict()) else: - print("ERROR: NO MATCH") + print("ERROR: NO MATCH") diff --git a/docs/conf.py b/docs/conf.py index 94ffd5901..04111404f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -43,22 +43,22 @@ # Define a custom autodoc documenter for the render.functions module # This will remove the requestContext parameter which doesnt make sense in the context of the docs class RenderFunctionDocumenter(autodoc.FunctionDocumenter): - priority = 10 # Override FunctionDocumenter + priority = 10 # Override FunctionDocumenter - @classmethod - def can_document_member(cls, member, membername, isattr, parent): - return autodoc.FunctionDocumenter.can_document_member(member, membername, isattr, parent) and \ - parent.name == 'graphite.render.functions' + @classmethod + def can_document_member(cls, member, membername, isattr, parent): + return autodoc.FunctionDocumenter.can_document_member(member, membername, isattr, parent) \ + and parent.name == 'graphite.render.functions' - def format_args(self): - args = autodoc.FunctionDocumenter.format_args(self) - if args is 
not None: - # Really, a regex sub here is by far the easiest way - return re.sub('requestContext, ','',args) + def format_args(self): + args = autodoc.FunctionDocumenter.format_args(self) + if args is not None: + # Really, a regex sub here is by far the easiest way + return re.sub('requestContext, ','',args) def setup(app): - app.add_autodocumenter(RenderFunctionDocumenter) + app.add_autodocumenter(RenderFunctionDocumenter) # -- General configuration ----------------------------------------------------- diff --git a/docs/config-local-settings.rst b/docs/config-local-settings.rst index f5580100e..fcd5f6f1c 100644 --- a/docs/config-local-settings.rst +++ b/docs/config-local-settings.rst @@ -53,6 +53,12 @@ LOG_CACHE_PERFORMANCE Triggers the creation of ``cache.log`` which logs timings for remote calls to `carbon-cache` as well as Request Cache (memcached) hits and misses. +LOG_INFO_PERFORMANCE + `Default: True` + + Triggers the creation of ``info.log``, which logs general informational messages. Enabled by default. + + DEBUG = True `Default: False` diff --git a/docs/config-webapp.rst b/docs/config-webapp.rst index e5d4752f1..880738ef1 100644 --- a/docs/config-webapp.rst +++ b/docs/config-webapp.rst @@ -31,6 +31,75 @@ On Debian-based systems, run: sudo apt install gunicorn +Next, create the script that will run graphite-web using your process watcher +of choice. + +*Upstart* + +:: + + description "graphite-web server" + start on runlevel [2345] + stop on runlevel [!2345] + + respawn + + exec gunicorn wsgi --pythonpath=/opt/graphite/webapp/graphite --bind 127.0.0.1:8080 + +*Supervisor* + +:: + + [program:graphite-web] + command = gunicorn wsgi --pythonpath=/opt/graphite/webapp/graphite --bind 127.0.0.1:8080 + autostart = true + autorestart = true + +*systemd* + +:: + + # This is /etc/systemd/system/graphite-web.socket + [Unit] + Description=graphite-web socket + + [Socket] + ListenStream=/run/graphite-api.sock + ListenStream=127.0.0.1:8080 + + [Install] + WantedBy=sockets.target + +:: + + # This is /etc/systemd/system/graphite-web.service + [Unit] + Description=graphite-web service + Requires=graphite-web.socket + + [Service] + ExecStart=/usr/bin/gunicorn wsgi --pythonpath=/opt/graphite/webapp/graphite --bind 127.0.0.1:8080 + Restart=on-failure + #User=graphite + #Group=graphite + ExecReload=/bin/kill -s HUP $MAINPID + ExecStop=/bin/kill -s TERM $MAINPID + PrivateTmp=true + + [Install] + WantedBy=multi-user.target + +.. note:: + + If you have installed graphite-web and Gunicorn in a virtualenv, you + need to use the full path to Gunicorn. Instead of ``gunicorn``, use + ``/opt/graphite/bin/gunicorn`` (assuming your virtualenv is + at ``/opt/graphite``). + +See the `Gunicorn docs`_ for configuration options and command-line flags. + +.. _Gunicorn docs: http://docs.gunicorn.org/en/latest/ + Install nginx ^^^^^^^^^^^^^ @@ -161,9 +230,9 @@ Finally, configure the apache vhost. (You can find example of Graphite vhost con WSGIScriptAlias / /opt/graphite/conf/graphite.wsgi - Alias /static/ /opt/graphite/static/ + Alias /static/ /opt/graphite/webapp/content/; - + Order deny,allow Allow from all @@ -271,8 +340,8 @@ Enable the vhost and restart nginx: $ service nginx restart -Acnowlegments -------------_ +Acknowledgments +^^^^^^^^^^^^^^^ Portions of this manual are based on `Graphite-API deployment manual`_. 
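Before putting nginx or Apache in front of it, it is worth confirming that the gunicorn process answers on its own. The following smoke-test script is a sketch, not part of this patch: it assumes the 127.0.0.1:8080 bind address used in the unit files above and exercises the standard graphite-web ``/version`` and ``/render`` endpoints::

    #!/usr/bin/env python
    # Smoke test for a gunicorn-served graphite-web instance.
    # The 127.0.0.1:8080 address mirrors the process-watcher examples
    # above; adjust HOST/PORT if your configuration differs.
    try:
        from urllib.request import urlopen  # Python 3
    except ImportError:
        from urllib2 import urlopen  # Python 2

    HOST, PORT = '127.0.0.1', 8080

    for path in ('/version', '/render?target=constantLine(1)&format=json'):
        url = 'http://%s:%d%s' % (HOST, PORT, path)
        response = urlopen(url, timeout=5)  # raises on connection/HTTP errors
        print('%s -> HTTP %s' % (url, response.getcode()))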
diff --git a/docs/index.rst b/docs/index.rst index 6652f408e..83f87c1f0 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -17,6 +17,7 @@ Graphite Documentation admin-webapp composer render_api + metrics_api functions dashboard whisper diff --git a/docs/install-pip.rst b/docs/install-pip.rst index c845e3957..06d12a996 100644 --- a/docs/install-pip.rst +++ b/docs/install-pip.rst @@ -44,7 +44,7 @@ configuration in ``/usr/share/graphite``: .. code-block:: none - pip install https://github.com/graphite-project/carbon/tarball/master --install-option="--install-scripts=/usr/bin" --install-option="--install-lib=/usr/lib/python2.6/site-packages" --install-option="--install-data=/var/lib/graphite" + pip install https://github.com/graphite-project/carbon/tarball/master --install-option="--install-scripts=/usr/bin" --install-option="--install-lib=/usr/lib/python2.6/site-packages" --install-option="--install-data=/usr/share/graphite" Installing Graphite-web in a Custom Location -------------------------------------------- @@ -65,7 +65,7 @@ To install the Graphite-web code into the system-wide site-packages directory wi .. code-block:: none - pip install https://github.com/graphite-project/graphite-web/tarball/master --install-option="--install-scripts=/usr/bin" --install-option="--install-lib=/usr/lib/python2.6/site-packages" --install-option="--install-data=/var/lib/graphite" + pip install https://github.com/graphite-project/graphite-web/tarball/master --install-option="--install-scripts=/usr/bin" --install-option="--install-lib=/usr/lib/python2.6/site-packages" --install-option="--install-data=/usr/share/graphite" Installing Ceres ---------------- diff --git a/docs/install-synthesize.rst b/docs/install-synthesize.rst index f48b3327c..e417e46db 100644 --- a/docs/install-synthesize.rst +++ b/docs/install-synthesize.rst @@ -1,6 +1,15 @@ Installing From Synthesize ========================== -`Synthesize `_ is a script dedicated to making Graphite easy to install. As of this writing, the default installation provides Graphite 0.9.15 for Ubuntu Linux 14.04 LTS with an experimental release candidate for tracking Graphite ``HEAD``. Users may run the installation script manually, or they can choose to use the provided Vagrantfile. +`Synthesize `_ is a script dedicated to making Graphite easy to install. As of this writing, the default installation provides Graphite 1.1.7 for Ubuntu Linux 18.04 LTS with an experimental release candidate for tracking Graphite ``HEAD``. Users may run the installation script manually, or they can choose to use the provided Vagrantfile. For detailed instructions, please refer to the official project documentation on the `Synthesize `_ website. + +Installing From RESynthesize +============================ + +`REsynthesize `_ is a script forked from Synthesize to make Graphite easy to install on CentOS distributions. At the moment it works only on CentOS 8.2. It provides Graphite 1.1.7 and Grafana 7.1.3. Users can run the installation script manually by executing: + +`./resynthesize -i` + +For more information, refer to `REsynthesize `_. diff --git a/docs/tools.rst b/docs/tools.rst index 4f292ff61..b55e3469d 100644 --- a/docs/tools.rst +++ b/docs/tools.rst @@ -60,6 +60,9 @@ Collection `netdata`_ A fast and efficient monitoring agent that supports graphite backends. It has collection, forwarding, visualization and monitoring features. Netdata collects common system metrics and a variety of other sources through plugins. 
+`Promitor`_ + Bringing Azure Monitor metrics where you need them, allowing you to push Azure Monitor metrics to a variety of metric sinks such as a StatsD server. + `Sensu`_ A monitoring framework that can route metrics to Graphite. Servers subscribe to sets of checks, so getting metrics from a new server to Graphite is as simple as installing the Sensu client and subscribing. @@ -409,6 +412,7 @@ Other .. _pipe-to-graphite: https://github.com/iFixit/pipe-to-graphite .. _Polymur: https://github.com/jamiealquiza/polymur .. _Prometheus: https://github.com/prometheus/prometheus +.. _Promitor: https://promitor.io/ .. _RabbitMQ: http://www.rabbitmq.com .. _rearview: http://github.com/livingsocial/rearview .. _Rickshaw: http://code.shutterstock.com/rickshaw diff --git a/docs/whisper.rst b/docs/whisper.rst index 1e485e55c..474f1235e 100644 --- a/docs/whisper.rst +++ b/docs/whisper.rst @@ -15,7 +15,7 @@ Python `float() `_ function in the same way for special strings such as ``'inf'``. Maximum and minimum values are determined by the Python interpreter's allowable range for float values which can be found by executing:: - python -c 'import sys; print sys.float_info' + python -c 'import sys; print(sys.float_info)' Archives: Retention and Precision diff --git a/examples/example-client.py b/examples/example-client.py index 708963b07..ce1659565 100644 --- a/examples/example-client.py +++ b/examples/example-client.py @@ -25,41 +25,43 @@ delay = 60 if len(sys.argv) > 1: - delay = int( sys.argv[1] ) + delay = int( sys.argv[1] ) def get_loadavg(): - # For more details, "man proc" and "man uptime" - if platform.system() == "Linux": - return open('/proc/loadavg').read().strip().split()[:3] - else: - command = "uptime" - process = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True) - os.waitpid(process.pid, 0) - output = process.stdout.read().replace(',', ' ').strip().split() - length = len(output) - return output[length - 3:length] + # For more details, "man proc" and "man uptime" + if platform.system() == "Linux": + return open('/proc/loadavg').read().strip().split()[:3] + else: + command = "uptime" + process = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True) + os.waitpid(process.pid, 0) + output = process.stdout.read().replace(',', ' ').strip().split() + length = len(output) + return output[length - 3:length] sock = socket() try: - sock.connect( (CARBON_SERVER,CARBON_PORT) ) + sock.connect( (CARBON_SERVER,CARBON_PORT) ) except Exception: - print("Couldn't connect to %(server)s on port %(port)d, is carbon-agent.py running?" % { 'server':CARBON_SERVER, 'port':CARBON_PORT }) - sys.exit(1) + print("Couldn't connect to %(server)s on port %(port)d, is carbon-agent.py running?" 
% { + 'server': CARBON_SERVER, 'port': CARBON_PORT + }) + sys.exit(1) while True: - now = int( time.time() ) - lines = [] - #We're gonna report all three loadavg values - loadavg = get_loadavg() - lines.append("system.loadavg_1min %s %d" % (loadavg[0],now)) - lines.append("system.loadavg_5min %s %d" % (loadavg[1],now)) - lines.append("system.loadavg_15min %s %d" % (loadavg[2],now)) - message = '\n'.join(lines) + '\n' #all lines must end in a newline - print("sending message\n") - print('-' * 80) - print(message) - print() - sock.sendall(message) - time.sleep(delay) + now = int( time.time() ) + lines = [] + #We're gonna report all three loadavg values + loadavg = get_loadavg() + lines.append("system.loadavg_1min %s %d" % (loadavg[0],now)) + lines.append("system.loadavg_5min %s %d" % (loadavg[1],now)) + lines.append("system.loadavg_15min %s %d" % (loadavg[2],now)) + message = '\n'.join(lines) + '\n' #all lines must end in a newline + print("sending message\n") + print('-' * 80) + print(message) + print() + sock.sendall(message) + time.sleep(delay) diff --git a/requirements.txt b/requirements.txt index 9c7978225..b57c847d2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -43,12 +43,12 @@ txAMQP==0.8 django-tagging==0.4.6 gunicorn pytz -pyparsing +pyparsing>=2.3.0 cairocffi git+git://github.com/graphite-project/whisper.git#egg=whisper # Ceres is optional # git+git://github.com/graphite-project/ceres.git#egg=ceres -whitenoise +whitenoise==4.1.4 scandir urllib3 six diff --git a/setup.cfg b/setup.cfg index 916230f13..502a1a20b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,11 +19,24 @@ obsoletes = graphite <= 0.9.9 [flake8] exclude = .tox +# ignore 4-space-indent rules for biggest files with most lines to change +per-file-ignores = + webapp/graphite/util.py:E111,E114 + webapp/graphite/browser/views.py:E111,E114 + webapp/graphite/metrics/views.py:E111,E114 + webapp/graphite/dashboard/views.py:E111,E114 + webapp/graphite/render/evaluator.py:E111,E114 + webapp/graphite/render/functions.py:E111,E114 + webapp/graphite/render/datalib.py:E111,E114 + webapp/graphite/render/glyph.py:E111,E114 + webapp/graphite/render/views.py:E111,E114 + webapp/tests/test_tags.py:E111,E114 + webapp/tests/test_storage.py:E111,E114 + webapp/tests/test_render_glyph.py:E111,E114 + webapp/tests/test_render_datalib.py:E111,E114 + webapp/tests/test_finders_remote.py:E111,E114 + contrib/memcache_whisper.py:E111,E114 ignore = - # E111 indentation is not a multiple of four - E111, - # E114 indentation is not a multiple of four (comment) - E114, # E121 continuation line under-indented for hanging indent E121, # E126 continuation line over-indented for hanging indent diff --git a/setup.py b/setup.py index a1a21f104..68f60200a 100644 --- a/setup.py +++ b/setup.py @@ -48,25 +48,25 @@ cf.write(f) if os.environ.get('USE_SETUPTOOLS'): - from setuptools import setup - setup_kwargs = dict(zip_safe=0) + from setuptools import setup + setup_kwargs = dict(zip_safe=0) else: - from distutils.core import setup - setup_kwargs = dict() + from distutils.core import setup + setup_kwargs = dict() storage_dirs = [] for subdir in ('whisper/dummy.txt', 'ceres/dummy.txt', 'rrd/dummy.txt', 'log/dummy.txt', 'log/webapp/dummy.txt'): - storage_dirs.append( ('storage/%s' % subdir, []) ) + storage_dirs.append( ('storage/%s' % subdir, []) ) webapp_content = defaultdict(list) for root, dirs, files in os.walk('webapp/content'): - for filename in files: - filepath = os.path.join(root, filename) - webapp_content[root].append(filepath) + for filename in files: 
+ filepath = os.path.join(root, filename) + webapp_content[root].append(filepath) conf_files = [ ('conf', glob('conf/*.example')) ] examples = [ ('examples', glob('examples/example-*')) ] diff --git a/tox.ini b/tox.ini index 3303703a7..9f3aa744a 100644 --- a/tox.ini +++ b/tox.ini @@ -62,8 +62,8 @@ commands = [testenv:lint] basepython = python3.8 +changedir = {toxinidir} deps = - flake8 + flake8==3.7.9 commands = - flake8 --version - flake8 --show-source {toxinidir} + flake8 --show-source diff --git a/webapp/content/js/composer_widgets.js b/webapp/content/js/composer_widgets.js index e1ce36c7a..430ab33c8 100644 --- a/webapp/content/js/composer_widgets.js +++ b/webapp/content/js/composer_widgets.js @@ -515,7 +515,14 @@ var GraphDataWindow = { hideHeaders: true, width: 385, height: 140, - columns: [ {header: 'Graph Targets', width: 1.0, dataIndex: 'value'} ], + columns: [ + { + header: 'Graph Targets', + width: 1.0, + dataIndex: 'value', + tpl: '{value:htmlEncode}' + } + ], listeners: { contextmenu: this.targetContextMenu, afterrender: this.targetChanged, @@ -1074,8 +1081,10 @@ function createFunctionsMenu() { text: 'Combine', menu: [ {text: 'Sum', handler: applyFuncToAll('sumSeries')}, + {text: 'Sum (of series lists)', handler: applyFuncToAll('sumSeriesLists')}, {text: 'Average', handler: applyFuncToAll('averageSeries')}, {text: 'Product', handler: applyFuncToAll('multiplySeries')}, + {text: 'Product (of series lists)', handler: applyFuncToAll('multiplySeriesLists')}, {text: 'Min Values', handler: applyFuncToAll('minSeries')}, {text: 'Max Values', handler: applyFuncToAll('maxSeries')}, {text: 'Group', handler: applyFuncToAll('group')}, @@ -1127,6 +1136,7 @@ function createFunctionsMenu() { {text: 'Linear Regression', handler: applyFuncToEachWithInput('linearRegression', 'Start source of regression at (example: 14:57 20150115)', {quote: true})}, {text: 'As Percent', handler: applyFuncToEachWithInput('asPercent', 'Please enter the value that corresponds to 100% or leave blank to use the total', {allowBlank: true})}, {text: 'Difference (of 2 series)', handler: applyFuncToAll('diffSeries')}, + {text: 'Difference (of series lists)', handler: applyFuncToAll('diffSeriesLists')}, {text: 'Ratio (of 2 series)', handler: applyFuncToAll('divideSeries')}, {text: 'Ratio (of series lists)', handler: applyFuncToAll('divideSeriesLists')}, {text: 'Exponential Moving Average', handler: applyFuncToEachWithInput('exponentialMovingAverage', 'EMA for the last __ data points')} diff --git a/webapp/content/js/dashboard.js b/webapp/content/js/dashboard.js index 1be7c640b..06cb7b932 100644 --- a/webapp/content/js/dashboard.js +++ b/webapp/content/js/dashboard.js @@ -368,8 +368,7 @@ function initDashboard () { enableKeyEvents: true, cls: 'completer-input-field', listeners: { - keypress: completerKeyPress, - specialkey: completerKeyPress, + keydown: completerKeyPress, afterrender: focusCompleter } }); @@ -1909,6 +1908,7 @@ function graphClicked(graphView, graphIndex, element, evt) { header: 'Target', dataIndex: 'target', width: gridWidth - 90, + renderer: 'htmlEncode', editor: {xtype: 'textfield'} }, { @@ -3009,8 +3009,8 @@ function setDashboardName(name) { document.title = name + ' - Graphite Dashboard'; changeHash(name); - navBar.setTitle(name + ' - (' + dashboardURL + ')'); - saveButton.setText('Save "' + name + '"'); + navBar.setTitle(htmlEncode(name + ' - (' + dashboardURL + ')')); + saveButton.setText(htmlEncode('Save "' + name + '"')); saveButton.enable(); } } @@ -3147,7 +3147,7 @@ function showDashboardFinder() 
{ dashboardsList = new Ext.list.ListView({ columns: [ - {header: 'Dashboard', width: 1.0, dataIndex: 'name', sortable: false} + {header: 'Dashboard', width: 1.0, dataIndex: 'name', sortable: false, tpl:'{name:htmlEncode}'} ], columnSort: false, emptyText: 'No dashboards found', diff --git a/webapp/graphite/account/admin.py b/webapp/graphite/account/admin.py index 9c1454ddc..98669555a 100644 --- a/webapp/graphite/account/admin.py +++ b/webapp/graphite/account/admin.py @@ -3,8 +3,8 @@ class MyGraphAdmin(admin.ModelAdmin): - list_display = ('profile','name') - list_filter = ('profile',) + list_display = ('profile','name') + list_filter = ('profile',) admin.site.register(Profile) diff --git a/webapp/graphite/account/ldapBackend.py b/webapp/graphite/account/ldapBackend.py index cc897ed3c..53eee446a 100644 --- a/webapp/graphite/account/ldapBackend.py +++ b/webapp/graphite/account/ldapBackend.py @@ -20,52 +20,52 @@ class LDAPBackend: - def authenticate(self, username=None, password=None): - if settings.LDAP_USER_DN_TEMPLATE is not None: - settings.LDAP_BASE_USER = settings.LDAP_USER_DN_TEMPLATE % {'username': username} - settings.LDAP_BASE_PASS = password - try: - conn = ldap.initialize(settings.LDAP_URI) - conn.protocol_version = ldap.VERSION3 - if settings.LDAP_USE_TLS: - conn.start_tls_s() - conn.simple_bind_s( settings.LDAP_BASE_USER, settings.LDAP_BASE_PASS ) - except ldap.LDAPError: - traceback.print_exc() - return None + def authenticate(self, username=None, password=None): + if settings.LDAP_USER_DN_TEMPLATE is not None: + settings.LDAP_BASE_USER = settings.LDAP_USER_DN_TEMPLATE % {'username': username} + settings.LDAP_BASE_PASS = password + try: + conn = ldap.initialize(settings.LDAP_URI) + conn.protocol_version = ldap.VERSION3 + if settings.LDAP_USE_TLS: + conn.start_tls_s() + conn.simple_bind_s( settings.LDAP_BASE_USER, settings.LDAP_BASE_PASS ) + except ldap.LDAPError: + traceback.print_exc() + return None - scope = ldap.SCOPE_SUBTREE - filter = settings.LDAP_USER_QUERY % username - returnFields = ['dn','mail'] - try: - resultID = conn.search( settings.LDAP_SEARCH_BASE, scope, filter, returnFields ) - resultType, resultData = conn.result( resultID, 0 ) - if len(resultData) != 1: # User does not exist - return None + scope = ldap.SCOPE_SUBTREE + filter = settings.LDAP_USER_QUERY % username + returnFields = ['dn','mail'] + try: + resultID = conn.search( settings.LDAP_SEARCH_BASE, scope, filter, returnFields ) + resultType, resultData = conn.result( resultID, 0 ) + if len(resultData) != 1: # User does not exist + return None - userDN = resultData[0][0] - try: - userMail = resultData[0][1]['mail'][0].decode("utf-8") - except Exception: - userMail = "Unknown" + userDN = resultData[0][0] + try: + userMail = resultData[0][1]['mail'][0].decode("utf-8") + except Exception: + userMail = "Unknown" - conn.simple_bind_s(userDN,password) - try: - user = User.objects.get(username=username) - except Exception: # First time login, not in django's database - # To prevent login from django db user - randomPasswd = User.objects.make_random_password(length=16) - user = User.objects.create_user(username, userMail, randomPasswd) - user.save() + conn.simple_bind_s(userDN,password) + try: + user = User.objects.get(username=username) + except Exception: # First time login, not in django's database + # To prevent login from django db user + randomPasswd = User.objects.make_random_password(length=16) + user = User.objects.create_user(username, userMail, randomPasswd) + user.save() - return user + return user - 
except ldap.INVALID_CREDENTIALS: - traceback.print_exc() - return None + except ldap.INVALID_CREDENTIALS: + traceback.print_exc() + return None - def get_user(self,user_id): - try: - return User.objects.get(pk=user_id) - except User.DoesNotExist: - return None + def get_user(self,user_id): + try: + return User.objects.get(pk=user_id) + except User.DoesNotExist: + return None diff --git a/webapp/graphite/account/models.py b/webapp/graphite/account/models.py index d47d0a68a..2c81cd270 100644 --- a/webapp/graphite/account/models.py +++ b/webapp/graphite/account/models.py @@ -17,35 +17,35 @@ class Profile(models.Model): - user = models.OneToOneField(auth_models.User, on_delete=models.CASCADE) - history = models.TextField(default="") - advancedUI = models.BooleanField(default=False) - __str__ = lambda self: "Profile for %s" % self.user + user = models.OneToOneField(auth_models.User, on_delete=models.CASCADE) + history = models.TextField(default="") + advancedUI = models.BooleanField(default=False) + __str__ = lambda self: "Profile for %s" % self.user class Variable(models.Model): - profile = models.ForeignKey(Profile, on_delete=models.CASCADE) - name = models.CharField(max_length=64) - value = models.CharField(max_length=64) + profile = models.ForeignKey(Profile, on_delete=models.CASCADE) + name = models.CharField(max_length=64) + value = models.CharField(max_length=64) class View(models.Model): - profile = models.ForeignKey(Profile, on_delete=models.CASCADE) - name = models.CharField(max_length=64) + profile = models.ForeignKey(Profile, on_delete=models.CASCADE) + name = models.CharField(max_length=64) class Window(models.Model): - view = models.ForeignKey(View, on_delete=models.CASCADE) - name = models.CharField(max_length=64) - top = models.IntegerField() - left = models.IntegerField() - width = models.IntegerField() - height = models.IntegerField() - url = models.TextField() - interval = models.IntegerField(null=True) + view = models.ForeignKey(View, on_delete=models.CASCADE) + name = models.CharField(max_length=64) + top = models.IntegerField() + left = models.IntegerField() + width = models.IntegerField() + height = models.IntegerField() + url = models.TextField() + interval = models.IntegerField(null=True) class MyGraph(models.Model): - profile = models.ForeignKey(Profile, on_delete=models.CASCADE) - name = models.CharField(max_length=64) - url = models.TextField() + profile = models.ForeignKey(Profile, on_delete=models.CASCADE) + name = models.CharField(max_length=64) + url = models.TextField() diff --git a/webapp/graphite/account/views.py b/webapp/graphite/account/views.py index 918fa316b..06fe5c6b3 100644 --- a/webapp/graphite/account/views.py +++ b/webapp/graphite/account/views.py @@ -23,42 +23,42 @@ def loginView(request): - username = request.POST.get('username') - password = request.POST.get('password') - if request.method == 'GET': - nextPage = request.GET.get('nextPage', reverse('browser')) - else: - nextPage = request.POST.get('nextPage', reverse('browser')) - if username and password: - user = authenticate(username=username,password=password) - if user is None: - return render(request, "login.html",{'authenticationFailed' : True, 'nextPage' : nextPage}) - elif not user.is_active: - return render(request, "login.html",{'accountDisabled' : True, 'nextPage' : nextPage}) + username = request.POST.get('username') + password = request.POST.get('password') + if request.method == 'GET': + nextPage = request.GET.get('nextPage', reverse('browser')) + else: + nextPage = 
request.POST.get('nextPage', reverse('browser')) + if username and password: + user = authenticate(username=username,password=password) + if user is None: + return render(request, "login.html",{'authenticationFailed' : True, 'nextPage' : nextPage}) + elif not user.is_active: + return render(request, "login.html",{'accountDisabled' : True, 'nextPage' : nextPage}) + else: + login(request,user) + return HttpResponseRedirect(nextPage) else: - login(request,user) - return HttpResponseRedirect(nextPage) - else: - return render(request, "login.html",{'nextPage' : nextPage}) + return render(request, "login.html",{'nextPage' : nextPage}) def logoutView(request): - nextPage = request.GET.get('nextPage', reverse('browser')) - logout(request) - return HttpResponseRedirect(nextPage) + nextPage = request.GET.get('nextPage', reverse('browser')) + logout(request) + return HttpResponseRedirect(nextPage) def editProfile(request): - if not isAuthenticated(request.user): - return HttpResponseRedirect(reverse('browser')) - context = { 'profile' : getProfile(request) } - return render(request, "editProfile.html",context) + if not isAuthenticated(request.user): + return HttpResponseRedirect(reverse('browser')) + context = { 'profile' : getProfile(request) } + return render(request, "editProfile.html",context) def updateProfile(request): - profile = getProfile(request,allowDefault=False) - if profile: - profile.advancedUI = request.POST.get('advancedUI','off') == 'on' - profile.save() - nextPage = request.POST.get('nextPage', reverse('browser')) - return HttpResponseRedirect(nextPage) + profile = getProfile(request,allowDefault=False) + if profile: + profile.advancedUI = request.POST.get('advancedUI','off') == 'on' + profile.save() + nextPage = request.POST.get('nextPage', reverse('browser')) + return HttpResponseRedirect(nextPage) diff --git a/webapp/graphite/browser/views.py b/webapp/graphite/browser/views.py index 3bc9dc9ca..223a76afa 100644 --- a/webapp/graphite/browser/views.py +++ b/webapp/graphite/browser/views.py @@ -24,7 +24,6 @@ from graphite.util import json from graphite.logger import log from hashlib import md5 -from six.moves.urllib.parse import urlencode, urlparse, parse_qsl def header(request): @@ -138,19 +137,7 @@ def myGraphLookup(request): else: m = md5() m.update(name.encode('utf-8')) - - # Sanitize target - urlEscaped = str(graph.url) - graphUrl = urlparse(urlEscaped) - graphUrlParams = {} - graphUrlParams['target'] = [] - for param in parse_qsl(graphUrl.query): - if param[0] != 'target': - graphUrlParams[param[0]] = param[1] - else: - graphUrlParams[param[0]].append(escape(param[1])) - urlEscaped = graphUrl._replace(query=urlencode(graphUrlParams, True)).geturl() - node.update( { 'id' : str(userpath_prefix + m.hexdigest()), 'graphUrl' : urlEscaped } ) + node.update( { 'id' : str(userpath_prefix + m.hexdigest()), 'graphUrl' : graph.url } ) node.update(leafNode) nodes.append(node) @@ -237,22 +224,10 @@ def userGraphLookup(request): m = md5() m.update(nodeName.encode('utf-8')) - # Sanitize target - urlEscaped = str(graph.url) - graphUrl = urlparse(urlEscaped) - graphUrlParams = {} - graphUrlParams['target'] = [] - for param in parse_qsl(graphUrl.query): - if param[0] != 'target': - graphUrlParams[param[0]] = param[1] - else: - graphUrlParams[param[0]].append(escape(param[1])) - urlEscaped = graphUrl._replace(query=urlencode(graphUrlParams, True)).geturl() - node = { 'text' : escape(nodeName), 'id' : username + '.' 
+ prefix + m.hexdigest(), - 'graphUrl' : urlEscaped, + 'graphUrl' : graph.url, } node.update(leafNode) diff --git a/webapp/graphite/carbonlink.py b/webapp/graphite/carbonlink.py index 378d0402f..a9bdbc10e 100644 --- a/webapp/graphite/carbonlink.py +++ b/webapp/graphite/carbonlink.py @@ -12,190 +12,190 @@ try: - import six.moves.cPickle as pickle + import six.moves.cPickle as pickle except ImportError: - import pickle + import pickle def load_keyfunc(): - if settings.CARBONLINK_HASHING_KEYFUNC: - module_path, func_name = settings.CARBONLINK_HASHING_KEYFUNC.rsplit(':', 1) - log.cache("Using keyfunc %s found in %s" % (str(func_name), str(module_path))) - return load_module(module_path, member=func_name) - else: - return lambda x: x + if settings.CARBONLINK_HASHING_KEYFUNC: + module_path, func_name = settings.CARBONLINK_HASHING_KEYFUNC.rsplit(':', 1) + log.cache("Using keyfunc %s found in %s" % (str(func_name), str(module_path))) + return load_module(module_path, member=func_name) + else: + return lambda x: x class CarbonLinkRequestError(Exception): - pass + pass class CarbonLinkPool(object): - def __init__(self, hosts, timeout): - self.hosts = [ (server, instance) for (server, port, instance) in hosts ] - self.ports = dict( - ((server, instance), port) for (server, port, instance) in hosts ) - self.timeout = float(timeout) - servers = set([server for (server, port, instance) in hosts]) - if len(servers) < settings.REPLICATION_FACTOR: - raise Exception("REPLICATION_FACTOR=%d cannot exceed servers=%d" % ( - settings.REPLICATION_FACTOR, len(servers))) - - self.hash_ring = ConsistentHashRing( - self.hosts, hash_type=settings.CARBONLINK_HASHING_TYPE) - self.keyfunc = load_keyfunc() - self.connections = {} - self.last_failure = {} - # Create a connection pool for each host - for host in self.hosts: - self.connections[host] = set() - - def select_host(self, metric): - "Returns the carbon host that has data for the given metric" - key = self.keyfunc(metric) - nodes = [] - servers = set() - for node in self.hash_ring.get_nodes(key): - (server, instance) = node - if server in servers: - continue - servers.add(server) - nodes.append(node) - if len(servers) >= settings.REPLICATION_FACTOR: - break - - available = [ n for n in nodes if self.is_available(n) ] - return random.choice(available or nodes) - - def is_available(self, host): - now = time.time() - last_fail = self.last_failure.get(host, 0) - return (now - last_fail) < settings.CARBONLINK_RETRY_DELAY - - def get_connection(self, host): - # First try to take one out of the pool for this host - (server, instance) = host - port = self.ports[host] - connectionPool = self.connections[host] - try: - return connectionPool.pop() - except KeyError: - pass #nothing left in the pool, gotta make a new connection - - log.cache("CarbonLink creating a new socket for %s" % str(host)) - try: - connection = socket.create_connection((server, port), self.timeout) - except socket.error: - self.last_failure[host] = time.time() - raise - else: - connection.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - return connection - - def query(self, metric): - request = dict(type='cache-query', metric=metric) - results = self.send_request(request) - log.cache("CarbonLink cache-query request for %s returned %d datapoints" % ( - metric, len(results['datapoints']))) - return results['datapoints'] - - def get_metadata(self, metric, key): - request = dict(type='get-metadata', metric=metric, key=key) - results = self.send_request(request) - log.cache("CarbonLink get-metadata 
request received for %s:%s" % (metric, key)) - return results['value'] - - def set_metadata(self, metric, key, value): - request = dict(type='set-metadata', metric=metric, key=key, value=value) - results = self.send_request(request) - log.cache("CarbonLink set-metadata request received for %s:%s" % (metric, key)) - return results - - def send_request(self, request): - metric = request['metric'] - serialized_request = pickle.dumps(request, protocol=-1) - len_prefix = struct.pack("!L", len(serialized_request)) - request_packet = len_prefix + serialized_request - result = {} - result.setdefault('datapoints', []) - - if metric.startswith(settings.CARBON_METRIC_PREFIX): - return self.send_request_to_all(request) - - if not self.hosts: - log.cache("CarbonLink is not connected to any host. Returning empty nodes list") - return result - - host = self.select_host(metric) - conn = self.get_connection(host) - log.cache("CarbonLink sending request for %s to %s" % (metric, str(host))) - try: - conn.sendall(request_packet) - result = self.recv_response(conn) - except Exception as e: - self.last_failure[host] = time.time() - log.cache("Exception getting data from cache %s: %s" % (str(host), e)) - else: - self.connections[host].add(conn) - if 'error' in result: - log.cache("Error getting data from cache: %s" % result['error']) - raise CarbonLinkRequestError(result['error']) - log.cache("CarbonLink finished receiving %s from %s" % (str(metric), str(host))) - return result - - def send_request_to_all(self, request): - metric = request['metric'] - serialized_request = pickle.dumps(request, protocol=-1) - len_prefix = struct.pack("!L", len(serialized_request)) - request_packet = len_prefix + serialized_request - results = {} - results.setdefault('datapoints', {}) - - for host in self.hosts: - conn = self.get_connection(host) - log.cache("CarbonLink sending request for %s to %s" % (metric, str(host))) - try: - conn.sendall(request_packet) - result = self.recv_response(conn) - except Exception as e: - self.last_failure[host] = time.time() - log.cache("Exception getting data from cache %s: %s" % (str(host), e)) - else: - self.connections[host].add(conn) - if 'error' in result: - log.cache("Error getting data from cache %s: %s" % (str(host), result['error'])) + def __init__(self, hosts, timeout): + self.hosts = [ (server, instance) for (server, port, instance) in hosts ] + self.ports = { (server, instance): port for (server, port, instance) in hosts } + self.timeout = float(timeout) + servers = set([server for (server, port, instance) in hosts]) + if len(servers) < settings.REPLICATION_FACTOR: + raise Exception("REPLICATION_FACTOR=%d cannot exceed servers=%d" % ( + settings.REPLICATION_FACTOR, len(servers))) + + self.hash_ring = ConsistentHashRing( + self.hosts, hash_type=settings.CARBONLINK_HASHING_TYPE) + self.keyfunc = load_keyfunc() + self.connections = {} + self.last_failure = {} + # Create a connection pool for each host + for host in self.hosts: + self.connections[host] = set() + + def select_host(self, metric): + "Returns the carbon host that has data for the given metric" + key = self.keyfunc(metric) + nodes = [] + servers = set() + for node in self.hash_ring.get_nodes(key): + (server, instance) = node + if server in servers: + continue + servers.add(server) + nodes.append(node) + if len(servers) >= settings.REPLICATION_FACTOR: + break + + available = [ n for n in nodes if self.is_available(n) ] + return random.choice(available or nodes) + + def is_available(self, host): + now = time.time() + last_fail = 
self.last_failure.get(host, 0) + return (now - last_fail) < settings.CARBONLINK_RETRY_DELAY + + def get_connection(self, host): + # First try to take one out of the pool for this host + (server, instance) = host + port = self.ports[host] + connectionPool = self.connections[host] + try: + return connectionPool.pop() + except KeyError: + pass #nothing left in the pool, gotta make a new connection + + log.cache("CarbonLink creating a new socket for %s" % str(host)) + try: + connection = socket.create_connection((server, port), self.timeout) + except socket.error: + self.last_failure[host] = time.time() + raise else: - if len(result['datapoints']) > 1: - results['datapoints'].update(result['datapoints']) - log.cache("CarbonLink finished receiving %s from %s" % (str(metric), str(host))) - return results - - def recv_response(self, conn): - len_prefix = self.recv_exactly(conn, 4) - body_size = struct.unpack("!L", len_prefix)[0] - body = self.recv_exactly(conn, body_size) - return unpickle.loads(body) - - @staticmethod - def recv_exactly(conn, num_bytes): - buf = b'' - while len(buf) < num_bytes: - data = conn.recv(num_bytes - len(buf)) - if not data: - raise Exception("Connection lost") - buf += data - - return buf + connection.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + return connection + + def query(self, metric): + request = dict(type='cache-query', metric=metric) + results = self.send_request(request) + log.cache("CarbonLink cache-query request for %s returned %d datapoints" % ( + metric, len(results['datapoints']))) + return results['datapoints'] + + def get_metadata(self, metric, key): + request = dict(type='get-metadata', metric=metric, key=key) + results = self.send_request(request) + log.cache("CarbonLink get-metadata request received for %s:%s" % (metric, key)) + return results['value'] + + def set_metadata(self, metric, key, value): + request = dict(type='set-metadata', metric=metric, key=key, value=value) + results = self.send_request(request) + log.cache("CarbonLink set-metadata request received for %s:%s" % (metric, key)) + return results + + def send_request(self, request): + metric = request['metric'] + serialized_request = pickle.dumps(request, protocol=settings.CARBONLINK_PICKLE_PROTOCOL) + len_prefix = struct.pack("!L", len(serialized_request)) + request_packet = len_prefix + serialized_request + result = {} + result.setdefault('datapoints', []) + + if metric.startswith(settings.CARBON_METRIC_PREFIX): + return self.send_request_to_all(request) + + if not self.hosts: + log.cache("CarbonLink is not connected to any host. 
Returning empty nodes list") + return result + + host = self.select_host(metric) + conn = self.get_connection(host) + log.cache("CarbonLink sending request for %s to %s" % (metric, str(host))) + try: + conn.sendall(request_packet) + result = self.recv_response(conn) + except Exception as e: + self.last_failure[host] = time.time() + log.cache("Exception getting data from cache %s: %s" % (str(host), e)) + else: + self.connections[host].add(conn) + if 'error' in result: + log.cache("Error getting data from cache: %s" % result['error']) + raise CarbonLinkRequestError(result['error']) + log.cache("CarbonLink finished receiving %s from %s" % (str(metric), str(host))) + return result + + def send_request_to_all(self, request): + metric = request['metric'] + serialized_request = pickle.dumps(request, protocol=settings.CARBONLINK_PICKLE_PROTOCOL) + len_prefix = struct.pack("!L", len(serialized_request)) + request_packet = len_prefix + serialized_request + results = {} + results.setdefault('datapoints', {}) + + for host in self.hosts: + conn = self.get_connection(host) + log.cache("CarbonLink sending request for %s to %s" % (metric, str(host))) + try: + conn.sendall(request_packet) + result = self.recv_response(conn) + except Exception as e: + self.last_failure[host] = time.time() + log.cache("Exception getting data from cache %s: %s" % (str(host), e)) + else: + self.connections[host].add(conn) + if 'error' in result: + log.cache("Error getting data from cache %s: %s" % (str(host), result['error'])) + else: + if len(result['datapoints']) > 1: + results['datapoints'].update(result['datapoints']) + log.cache("CarbonLink finished receiving %s from %s" % (str(metric), str(host))) + + return results + + def recv_response(self, conn): + len_prefix = self.recv_exactly(conn, 4) + body_size = struct.unpack("!L", len_prefix)[0] + body = self.recv_exactly(conn, body_size) + return unpickle.loads(body) + + @staticmethod + def recv_exactly(conn, num_bytes): + buf = b'' + while len(buf) < num_bytes: + data = conn.recv(num_bytes - len(buf)) + if not data: + raise Exception("Connection lost") + buf += data + + return buf @ThreadSafeSingleton class GlobalCarbonLinkPool(CarbonLinkPool): - def __init__(self): - hosts = parseHosts(settings.CARBONLINK_HOSTS) - timeout = settings.CARBONLINK_TIMEOUT - CarbonLinkPool.__init__(self, hosts, timeout) + def __init__(self): + hosts = parseHosts(settings.CARBONLINK_HOSTS) + timeout = settings.CARBONLINK_TIMEOUT + CarbonLinkPool.__init__(self, hosts, timeout) def CarbonLink(): - """Handy accessor for the global singleton.""" - return GlobalCarbonLinkPool.instance() + """Handy accessor for the global singleton.""" + return GlobalCarbonLinkPool.instance() diff --git a/webapp/graphite/composer/views.py b/webapp/graphite/composer/views.py index 5de758700..f60c37bf6 100644 --- a/webapp/graphite/composer/views.py +++ b/webapp/graphite/composer/views.py @@ -24,60 +24,60 @@ def composer(request): - profile = getProfile(request) - context = { - 'queryString' : request.GET.urlencode().replace('+','%20'), - 'showTarget' : request.GET.get('showTarget',''), - 'user' : request.user, - 'profile' : profile, - 'showMyGraphs' : int( profile.user.username != 'default' ), - 'searchEnabled' : int( os.access(settings.INDEX_FILE, os.R_OK) ), - 'refreshInterval': settings.AUTO_REFRESH_INTERVAL, - 'debug' : settings.DEBUG, - 'jsdebug' : settings.DEBUG, - } - return render(request, "composer.html",context) + profile = getProfile(request) + context = { + 'queryString' : 
request.GET.urlencode().replace('+','%20'), + 'showTarget' : request.GET.get('showTarget',''), + 'user' : request.user, + 'profile' : profile, + 'showMyGraphs' : int( profile.user.username != 'default' ), + 'searchEnabled' : int( os.access(settings.INDEX_FILE, os.R_OK) ), + 'refreshInterval': settings.AUTO_REFRESH_INTERVAL, + 'debug' : settings.DEBUG, + 'jsdebug' : settings.DEBUG, + } + return render(request, "composer.html", context) def mygraph(request): - profile = getProfile(request, allowDefault=False) + profile = getProfile(request, allowDefault=False) - if not profile: - return HttpResponse( "You are not logged in!" ) + if not profile: + return HttpResponse("You are not logged in!") - action = request.GET['action'] - graphName = request.GET['graphName'] + action = request.GET['action'] + graphName = request.GET['graphName'] - if not graphName: - return HttpResponse("You must type in a graph name.") + if not graphName: + return HttpResponse("You must type in a graph name.") - if action == 'save': - url = request.GET['url'] + if action == 'save': + url = request.GET['url'] - try: - existingGraph = profile.mygraph_set.get(name=graphName) - existingGraph.url = url - existingGraph.save() + try: + existingGraph = profile.mygraph_set.get(name=graphName) + existingGraph.url = url + existingGraph.save() - except ObjectDoesNotExist: - try: - newGraph = MyGraph(profile=profile,name=graphName,url=url) - newGraph.save() - except Exception: - log.exception("Failed to create new MyGraph in /composer/mygraph/, graphName=%s" % graphName) - return HttpResponse("Failed to save graph %s" % graphName) + except ObjectDoesNotExist: + try: + newGraph = MyGraph(profile=profile,name=graphName,url=url) + newGraph.save() + except Exception: + log.exception("Failed to create new MyGraph in /composer/mygraph/, graphName=%s" % graphName) + return HttpResponse("Failed to save graph %s" % graphName) - return HttpResponse("SAVED") + return HttpResponse("SAVED") - elif action == 'delete': - try: - existingGraph = profile.mygraph_set.get(name=graphName) - existingGraph.delete() + elif action == 'delete': + try: + existingGraph = profile.mygraph_set.get(name=graphName) + existingGraph.delete() - except ObjectDoesNotExist: - return HttpResponse("No such graph '%s'" % graphName) + except ObjectDoesNotExist: + return HttpResponse("No such graph '%s'" % graphName) - return HttpResponse("DELETED") + return HttpResponse("DELETED") - else: - return HttpResponse("Invalid operation '%s'" % action) + else: + return HttpResponse("Invalid operation '%s'" % action) diff --git a/webapp/graphite/dashboard/models.py b/webapp/graphite/dashboard/models.py index 2f8ee35e9..357820c2a 100644 --- a/webapp/graphite/dashboard/models.py +++ b/webapp/graphite/dashboard/models.py @@ -5,39 +5,39 @@ class Dashboard(models.Model): - name = models.CharField(primary_key=True, max_length=128) - owners = models.ManyToManyField(Profile, related_name='dashboards') - state = models.TextField() - __str__ = lambda self: "Dashboard [%s]" % self.name + name = models.CharField(primary_key=True, max_length=128) + owners = models.ManyToManyField(Profile, related_name='dashboards') + state = models.TextField() + __str__ = lambda self: "Dashboard [%s]" % self.name class Template(models.Model): - class Admin: pass - name = models.CharField(primary_key=True, max_length=128) - owners = models.ManyToManyField(Profile, related_name='templates') - state = models.TextField() - __str__ = lambda self: "Template [%s]" % self.name - - def loadState(self, val): - return 
self.state.replace('__VALUE__', val) - - def setState(self, state, key): - #XXX Might not need this - def replace_string(s): - if isinstance(s, six.text_type): - s = s.replace(key, '__VALUE__') - return s - - def update_graph(graph): - graph_opts = graph[1] - graph_opts['target'] = [replace_string(s) for s in graph_opts['target']] - return [replace_string(graph[0]), - graph_opts, - replace_string(graph[2])] - - # Parse JSON here and replace first five elements of target with __VALUE__ - parsed_state = json.loads(state) - for i, graph in enumerate(parsed_state['graphs']): - parsed_state['graphs'][i] = update_graph(graph) - self.state = json.dumps(parsed_state) + class Admin: pass + name = models.CharField(primary_key=True, max_length=128) + owners = models.ManyToManyField(Profile, related_name='templates') + state = models.TextField() + __str__ = lambda self: "Template [%s]" % self.name + + def loadState(self, val): + return self.state.replace('__VALUE__', val) + + def setState(self, state, key): + #XXX Might not need this + def replace_string(s): + if isinstance(s, six.text_type): + s = s.replace(key, '__VALUE__') + return s + + def update_graph(graph): + graph_opts = graph[1] + graph_opts['target'] = [replace_string(s) for s in graph_opts['target']] + return [replace_string(graph[0]), + graph_opts, + replace_string(graph[2])] + + # Parse JSON here and replace first five elements of target with __VALUE__ + parsed_state = json.loads(state) + for i, graph in enumerate(parsed_state['graphs']): + parsed_state['graphs'][i] = update_graph(graph) + self.state = json.dumps(parsed_state) diff --git a/webapp/graphite/errors.py b/webapp/graphite/errors.py index 5d3a6618a..289541798 100644 --- a/webapp/graphite/errors.py +++ b/webapp/graphite/errors.py @@ -1,21 +1,107 @@ from django.http import HttpResponseBadRequest +from graphite.logger import log class NormalizeEmptyResultError(Exception): - # throw error for normalize() when empty - pass + # throw error for normalize() when empty + pass class InputParameterError(ValueError): - pass + + def __init__(self, *args, **kwargs): + super(InputParameterError, self).__init__(*args, **kwargs) + self.context = {} + + def setSourceIdHeaders(self, newHeaders): + headers = self.context.get('sourceIdHeaders', {}) + headers.update(newHeaders) + self.context['sourceIdHeaders'] = headers + + @property + def sourceIdHeaders(self): + sourceIdHeaders = self.context.get('sourceIdHeaders', {}) + headers = list(sourceIdHeaders.keys()) + headers.sort() + source = '' + + for name in headers: + if source: + source += ', ' + source += '{name}: {value}'.format( + name=name, + value=sourceIdHeaders[name]) + + return source + + def setTargets(self, targets): + self.context['targets'] = targets + + @property + def targets(self): + return ', '.join(self.context.get('targets', [])) + + def setFunction(self, name, args, kwargs): + self.context['function'] = { + 'name': name, + 'args': args, + 'kwargs': kwargs, + } + + @property + def function(self): + func = self.context.get('function', None) + if not func: + return '' + + funcName = func.get('name', '') + if not funcName: + return '' + + kwargs = func.get('kwargs', {}) + kwargKeys = list(kwargs.keys()) + + # keep kwargs sorted in message, for consistency and testability + kwargKeys.sort() + + # generate string of args and kwargs + args = ', '.join( + argList + for argList in [ + ', '.join(repr(arg) for arg in func.get('args', [])), + ', '.join('{k}={v}'.format(k=str(k), v=repr(kwargs[k])) for k in kwargKeys), + ] if argList + ) + + 
return '{func}({args})'.format(func=funcName, args=args) + + def __str__(self): + msg = 'Invalid parameters ({msg})'.format(msg=str(super(InputParameterError, self).__str__())) + targets = self.targets + if targets: + msg += '; targets: "{targets}"'.format(targets=targets) + + source = self.sourceIdHeaders + if source: + msg += '; source: "{source}"'.format(source=source) + + # needs to be last because the string "args" may potentially be thousands + # of chars long after expanding the globbing patterns + func = self.function + if func: + msg += '; func: "{func}"'.format(func=func) + + return msg # decorator which turns InputParameterExceptions into Django's HttpResponseBadRequest def handleInputParameterError(f): def new_f(*args, **kwargs): - try: - return f(*args, **kwargs) - except InputParameterError as e: - return HttpResponseBadRequest('Bad Request: {err}'.format(err=e)) + try: + return f(*args, **kwargs) + except InputParameterError as e: + msgStr = str(e) + log.warning('%s', msgStr) + return HttpResponseBadRequest(msgStr) return new_f diff --git a/webapp/graphite/events/views.py b/webapp/graphite/events/views.py index 47c0cade8..4a19ffab6 100644 --- a/webapp/graphite/events/views.py +++ b/webapp/graphite/events/views.py @@ -37,11 +37,11 @@ def view_events(request): @jsonResponse(encoder=DjangoJSONEncoder) def jsonDetail(request, queryParams, event_id): try: - e = Event.objects.get(id=event_id) - e.tags = e.tags.split() - return model_to_dict(e) + e = Event.objects.get(id=event_id) + e.tags = e.tags.split() + return model_to_dict(e) except ObjectDoesNotExist: - raise HttpError('Event matching query does not exist', status=404) + raise HttpError('Event matching query does not exist', status=404) def detail(request, event_id): diff --git a/webapp/graphite/finders/__init__.py b/webapp/graphite/finders/__init__.py index 0470c7e39..4fc8c35da 100644 --- a/webapp/graphite/finders/__init__.py +++ b/webapp/graphite/finders/__init__.py @@ -2,7 +2,7 @@ import os.path import re -EXPAND_BRACES_RE = re.compile(r'.*(\{.*?[^\\]?\})') +EXPAND_BRACES_RE = re.compile(r'(\{([^\{\}]*)\})') def get_real_metric_path(absolute_path, metric_path): @@ -11,8 +11,8 @@ def get_real_metric_path(absolute_path, metric_path): if absolute_path != real_absolute_path: # replace left side base_fs_path that contains sym link with real fs path relative_fs_path = metric_path.replace('.', os.sep) - real_absolute_path_no_ext, _ext = os.path.splitext(real_absolute_path) - base_fs_path = os.path.dirname(real_absolute_path_no_ext[:-len(relative_fs_path)]) + absolute_path_no_ext, _ext = os.path.splitext(absolute_path) + base_fs_path = os.path.dirname(absolute_path_no_ext[:-len(relative_fs_path)]) real_base_fs_path = os.path.realpath(base_fs_path) real_relative_fs_path = real_absolute_path[len(real_base_fs_path):].lstrip(os.sep) return fs_to_metric(real_relative_fs_path) @@ -56,25 +56,13 @@ def match_entries(entries, pattern): def expand_braces(s): res = list() - # Used instead of s.strip('{}') because strip is greedy. 
- # We want to remove only ONE leading { and ONE trailing }, if both exist - def remove_outer_braces(s): - if s[0] == '{' and s[-1] == '}': - return s[1:-1] - return s - m = EXPAND_BRACES_RE.search(s) if m is not None: - sub = m.group(1) + sub = m.group(2) open_brace, close_brace = m.span(1) - if ',' in sub: - for pat in sub.strip('{}').split(','): - res.extend(expand_braces( - s[:open_brace] + pat + s[close_brace:])) - else: - res.extend(expand_braces( - s[:open_brace] + remove_outer_braces(sub) + s[close_brace:])) + for pat in sub.split(','): + res.extend(expand_braces(s[:open_brace] + pat + s[close_brace:])) else: - res.append(s.replace('\\}', '}')) + res.append(s) return list(set(res)) diff --git a/webapp/graphite/finders/standard.py b/webapp/graphite/finders/standard.py index 117338e4a..7755db27a 100644 --- a/webapp/graphite/finders/standard.py +++ b/webapp/graphite/finders/standard.py @@ -34,12 +34,12 @@ def find_nodes(self, query): # translate query pattern if it is tagged tagged = not query.pattern.startswith('_tagged.') and ';' in query.pattern if tagged: - # tagged series are stored in whisper using encoded names, so to retrieve them we need to - # encode the query pattern using the same scheme used in carbon when they are written. - encoded_paths = [ - TaggedSeries.encode(query.pattern, sep=os.sep, hash_only=True), - TaggedSeries.encode(query.pattern, sep=os.sep, hash_only=False), - ] + # tagged series are stored in whisper using encoded names, so to retrieve them we need to + # encode the query pattern using the same scheme used in carbon when they are written. + encoded_paths = [ + TaggedSeries.encode(query.pattern, sep=os.sep, hash_only=True), + TaggedSeries.encode(query.pattern, sep=os.sep, hash_only=False), + ] pattern_parts = clean_pattern.split('.') @@ -72,15 +72,15 @@ def find_nodes(self, query): # if we're finding by tag, return the proper metric path if tagged: - metric_path = query.pattern - real_metric_path = query.pattern + metric_path = query.pattern + real_metric_path = query.pattern else: - metric_path = fs_to_metric(relative_path) - real_metric_path = get_real_metric_path(absolute_path, metric_path) - metric_path_parts = metric_path.split('.') - for field_index in find_escaped_pattern_fields(query.pattern): - metric_path_parts[field_index] = pattern_parts[field_index].replace('\\', '') - metric_path = '.'.join(metric_path_parts) + metric_path = fs_to_metric(relative_path) + real_metric_path = get_real_metric_path(absolute_path, metric_path) + metric_path_parts = metric_path.split('.') + for field_index in find_escaped_pattern_fields(query.pattern): + metric_path_parts[field_index] = pattern_parts[field_index].replace('\\', '') + metric_path = '.'.join(metric_path_parts) # Now we construct and yield an appropriate Node object if isdir(absolute_path): @@ -180,26 +180,26 @@ def get_index(self, requestContext): matches = [] for root, _, files in walk(settings.WHISPER_DIR): - root = root.replace(settings.WHISPER_DIR, '') - for base_name in files: - if fnmatch.fnmatch(base_name, '*.wsp'): - match = join(root, base_name).replace('.wsp', '').replace('/', '.').lstrip('.') - bisect.insort_left(matches, match) + root = root.replace(settings.WHISPER_DIR, '') + for base_name in files: + if fnmatch.fnmatch(base_name, '*.wsp'): + match = join(root, base_name).replace('.wsp', '').replace('/', '.').lstrip('.') + bisect.insort_left(matches, match) # unlike 0.9.x, we're going to use os.walk with followlinks # since we require Python 2.7 and newer that supports it if 
RRDReader.supported: - for root, _, files in walk(settings.RRD_DIR, followlinks=True): - root = root.replace(settings.RRD_DIR, '') - for base_name in files: - if fnmatch.fnmatch(base_name, '*.rrd'): - absolute_path = join(settings.RRD_DIR, root, base_name) - base_name = splitext(base_name)[0] - metric_path = join(root, base_name) - rrd = RRDReader(absolute_path, metric_path) - for datasource_name in rrd.get_datasources(absolute_path): - match = join(metric_path, datasource_name).replace('.rrd', '').replace('/', '.').lstrip('.') - if match not in matches: - bisect.insort_left(matches, match) + for root, _, files in walk(settings.RRD_DIR, followlinks=True): + root = root.replace(settings.RRD_DIR, '') + for base_name in files: + if fnmatch.fnmatch(base_name, '*.rrd'): + absolute_path = join(settings.RRD_DIR, root, base_name) + base_name = splitext(base_name)[0] + metric_path = join(root, base_name) + rrd = RRDReader(absolute_path, metric_path) + for datasource_name in rrd.get_datasources(absolute_path): + match = join(metric_path, datasource_name).replace('.rrd', '').replace('/', '.').lstrip('.') + if match not in matches: + bisect.insort_left(matches, match) return matches diff --git a/webapp/graphite/functions/__init__.py b/webapp/graphite/functions/__init__.py index b59e6683b..f98daf7c5 100644 --- a/webapp/graphite/functions/__init__.py +++ b/webapp/graphite/functions/__init__.py @@ -18,90 +18,91 @@ def loadFunctions(force=False): - if _SeriesFunctions and not force: - return + if _SeriesFunctions and not force: + return - from graphite.render import functions + from graphite.render import functions - _SeriesFunctions.clear() - _SeriesFunctions.update(functions.SeriesFunctions) + _SeriesFunctions.clear() + _SeriesFunctions.update(functions.SeriesFunctions) - _PieFunctions.clear() - _PieFunctions.update(functions.PieFunctions) + _PieFunctions.clear() + _PieFunctions.update(functions.PieFunctions) - custom_modules = [] - for filename in listdir(customDir): - module_name, extension = splitext(filename) - if extension != '.py' or module_name == '__init__': - continue - custom_modules.append(customModPrefix + module_name) + custom_modules = [] + for filename in listdir(customDir): + module_name, extension = splitext(filename) + if extension != '.py' or module_name == '__init__': + continue + custom_modules.append(customModPrefix + module_name) - for module_name in custom_modules + settings.FUNCTION_PLUGINS: - try: - module = import_module(module_name) - except Exception as e: - log.warning('Error loading function plugin %s: %s' % (module_name, e)) - continue + for module_name in custom_modules + settings.FUNCTION_PLUGINS: + try: + module = import_module(module_name) + except Exception as e: + log.warning('Error loading function plugin %s: %s' % (module_name, e)) + continue - for func_name, func in getattr(module, 'SeriesFunctions', {}).items(): - try: - addFunction(_SeriesFunctions, func, func_name) - except Exception as e: - log.warning('Error loading function plugin %s: %s' % (module_name, e)) + for func_name, func in getattr(module, 'SeriesFunctions', {}).items(): + try: + addFunction(_SeriesFunctions, func, func_name) + except Exception as e: + log.warning('Error loading function plugin %s: %s' % (module_name, e)) - for func_name, func in getattr(module, 'PieFunctions', {}).items(): - try: - addFunction(_PieFunctions, func, func_name) - except Exception as e: - log.warning('Error loading function plugin %s: %s' % (module_name, e)) + for func_name, func in getattr(module, 'PieFunctions', 
{}).items(): + try: + addFunction(_PieFunctions, func, func_name) + except Exception as e: + log.warning('Error loading function plugin %s: %s' % (module_name, e)) def addFunction(dest, func, func_name): - if not hasattr(func, 'group'): - func.group = 'Ungrouped' + if not hasattr(func, 'group'): + func.group = 'Ungrouped' - if not hasattr(func, 'params'): - raise Exception('No params defined for %s' % func_name) + if not hasattr(func, 'params'): + raise Exception('No params defined for %s' % func_name) - for param in func.params: - if not isinstance(param, Param): - raise Exception('Invalid param specified for %s' % func_name) - dest[func_name] = func + for param in func.params: + if not isinstance(param, Param): + raise Exception('Invalid param specified for %s' % func_name) + dest[func_name] = func def SeriesFunctions(): - loadFunctions() - return _SeriesFunctions + loadFunctions() + return _SeriesFunctions def SeriesFunction(name): - loadFunctions() - try: - return _SeriesFunctions[name] - except KeyError: - raise KeyError('Function "%s" not found' % name) + loadFunctions() + try: + return _SeriesFunctions[name] + except KeyError: + raise KeyError('Function "%s" not found' % name) def PieFunctions(): - loadFunctions() - return _PieFunctions + loadFunctions() + return _PieFunctions def PieFunction(name): - loadFunctions() - try: - return _PieFunctions[name] - except KeyError: - raise KeyError('Function "%s" not found' % name) + loadFunctions() + try: + return _PieFunctions[name] + except KeyError: + raise KeyError('Function "%s" not found' % name) def functionInfo(name, func): - argspec = inspect.getargspec(func) - return { - 'name': name, - 'function': name + inspect.formatargspec(argspec[0][1:], argspec[1], argspec[2], argspec[3]), - 'description': inspect.getdoc(func), - 'module': inspect.getmodule(func).__name__, - 'group': getattr(func, 'group', 'Ungrouped'), - 'params': getattr(func, 'params', None), - } + argspec = inspect.getargspec(func) + argformat = inspect.formatargspec(argspec[0][1:], argspec[1], argspec[2], argspec[3]) + return { + 'name': name, + 'function': name + argformat, + 'description': inspect.getdoc(func), + 'module': inspect.getmodule(func).__name__, + 'group': getattr(func, 'group', 'Ungrouped'), + 'params': getattr(func, 'params', None), + } diff --git a/webapp/graphite/functions/aggfuncs.py b/webapp/graphite/functions/aggfuncs.py index e9d1e5f77..566762c87 100644 --- a/webapp/graphite/functions/aggfuncs.py +++ b/webapp/graphite/functions/aggfuncs.py @@ -27,8 +27,8 @@ def getAggFunc(func, rawFunc=None): - if func in aggFuncs: - return aggFuncs[func] - if func in aggFuncAliases: - return aggFuncAliases[func] - raise InputParameterError('Unsupported aggregation function: %s' % (rawFunc or func)) + if func in aggFuncs: + return aggFuncs[func] + if func in aggFuncAliases: + return aggFuncAliases[func] + raise InputParameterError('Unsupported aggregation function: %s' % (rawFunc or func)) diff --git a/webapp/graphite/functions/params.py b/webapp/graphite/functions/params.py index c041395cf..5a90c3811 100644 --- a/webapp/graphite/functions/params.py +++ b/webapp/graphite/functions/params.py @@ -4,57 +4,106 @@ from graphite.render.attime import parseTimeOffset from graphite.logger import log from graphite.functions.aggfuncs import aggFuncs, aggFuncAliases +from graphite.render.datalib import TimeSeries class ParamTypes(object): - pass + pass class ParamType(object): - options = [] + options = [] - def __init__(self, name, validator=None): - self.name = name - 
self.validator = validator + def __init__(self, name, validator=None): + self.name = name + self.validator = validator - @classmethod - def register(cls, name, *args): - setattr(ParamTypes, name, cls(name, *args)) + @classmethod + def register(cls, name, *args): + setattr(ParamTypes, name, cls(name, *args)) - def isValid(self, value): - if self.validator is None: - # if there's no validator for the type we assume True - return True + def isValid(self, value): + if self.validator is None: + # if there's no validator for the type we assume True + return value - return self.validator(value) + return self.validator(value) def validateBoolean(value): - return isinstance(value, bool) + if isinstance(value, six.string_types): + if value.lower() == 'true': + return True + if value.lower() == 'false': + return False + raise ValueError('Invalid boolean value: {value}'.format(value=repr(value))) + + if type(value) in [int, float]: + if value == 0: + return False + if value == 1: + return True + raise ValueError('Invalid boolean value: {value}'.format(value=repr(value))) + + return bool(value) def validateFloat(value): - return isinstance(value, float) or validateInteger(value) + return float(value) def validateInteger(value): - return isinstance(value, six.integer_types) + # prevent that float values get converted to int, because an + # error is better than silently falsifying the result + if type(value) is float: + raise ValueError('Not a valid integer value: {value}'.format(value=repr(value))) + + return int(value) def validateIntOrInf(value): - return validateInteger(value) or value == float('inf') + try: + return validateInteger(value) + except (TypeError, ValueError): + pass + + try: + inf = float('inf') + if float(value) == inf: + return inf + except (TypeError, ValueError, OverflowError): + pass + + raise ValueError('Not a valid integer nor float value: {value}'.format(value=repr(value))) def validateInterval(value): - try: - parseTimeOffset(value) - except Exception: - return False - return True + try: + parseTimeOffset(value) + except (IndexError, KeyError, TypeError, ValueError) as e: + raise ValueError('Invalid interval value: {value}: {e}'.format(value=repr(value), e=str(e))) + return value def validateSeriesList(value): - return isinstance(value, list) + if not isinstance(value, list): + raise ValueError('Invalid value type, it is not a list: {value}'.format(value=repr(value))) + + for series in value: + if not isinstance(series, TimeSeries): + raise ValueError('Invalid type "{type}", should be TimeSeries'.format(type=type(series))) + + return value + + +def validateSeriesLists(value): + if not isinstance(value, list): + raise ValueError('Invalid value type, it is not a list: {value}'.format(value=repr(value))) + + for entry in value: + validateSeriesList(entry) + + return value ParamType.register('boolean', validateBoolean) @@ -68,7 +117,7 @@ def validateSeriesList(value): ParamType.register('nodeOrTag') ParamType.register('series') ParamType.register('seriesList', validateSeriesList) -ParamType.register('seriesLists', validateSeriesList) +ParamType.register('seriesLists', validateSeriesLists) ParamType.register('string') ParamType.register('tag') @@ -78,160 +127,189 @@ def validateSeriesList(value): class ParamTypeAggFunc(ParamType): - def __init__(self, name, validator=None): - if validator is None: - validator = self.validateAggFuncs + def __init__(self, name, validator=None): + if validator is None: + validator = self.validateAggFuncs - super(ParamTypeAggFunc, 
self).__init__(name=name, validator=validator) - self.options = self.getValidAggFuncs() + super(ParamTypeAggFunc, self).__init__(name=name, validator=validator) + self.options = self.getValidAggFuncs() - @classmethod - def getValidAggFuncs(cls): - return list(aggFuncs.keys()) + list(aggFuncAliases.keys()) + @classmethod + def getValidAggFuncs(cls): + return list(aggFuncs.keys()) + list(aggFuncAliases.keys()) - @classmethod - def getDeprecatedAggFuncs(cls): - return [name + 'Series' for name in cls.getValidAggFuncs()] + @classmethod + def getDeprecatedAggFuncs(cls): + return [name + 'Series' for name in cls.getValidAggFuncs()] - @classmethod - def getAllValidAggFuncs(cls): - return cls.getValidAggFuncs() + cls.getDeprecatedAggFuncs() + @classmethod + def getAllValidAggFuncs(cls): + return cls.getValidAggFuncs() + cls.getDeprecatedAggFuncs() - def validateAggFuncs(self, value): - if value in self.getValidAggFuncs(): - return True + def validateAggFuncs(self, value): + if value in self.getValidAggFuncs(): + return value - if value in self.getDeprecatedAggFuncs(): - log.warning('Deprecated aggregation function "{value}" used'.format(value=value)) - return True + if value in self.getDeprecatedAggFuncs(): + log.warning('Deprecated aggregation function: {value}'.format(value=repr(value))) + return value - return False + raise ValueError('Invalid aggregation function: {value}'.format(value=repr(value))) ParamTypeAggFunc.register('aggFunc') class ParamTypeAggOrSeriesFunc(ParamTypeAggFunc): - options = [] + options = [] - def __init__(self, name, validator=None): - if validator is None: - validator = self.validateAggOrSeriesFuncs - super(ParamTypeAggOrSeriesFunc, self).__init__(name=name, validator=validator) + def __init__(self, name, validator=None): + if validator is None: + validator = self.validateAggOrSeriesFuncs + super(ParamTypeAggOrSeriesFunc, self).__init__(name=name, validator=validator) - def setSeriesFuncs(self, funcs): - # check for each of the series functions whether they have an 'aggregator' - # property being set to 'True'. If so we consider them valid aggregators. - for name, func in funcs.items(): - if getattr(func, 'aggregator', False) is not True: - continue + def setSeriesFuncs(self, funcs): + # check for each of the series functions whether they have an 'aggregator' + # property being set to 'True'. If so we consider them valid aggregators. 
+ for name, func in funcs.items(): + if getattr(func, 'aggregator', False) is not True: + continue - self.options.append(name) + self.options.append(name) - def validateAggOrSeriesFuncs(self, value): - if self.validateAggFuncs(value): - return True + def validateAggOrSeriesFuncs(self, value): + try: + return self.validateAggFuncs(value) + except ValueError: + pass - if value in self.options: - return True + if value in self.options: + return value - return False + return False ParamTypeAggOrSeriesFunc.register('aggOrSeriesFunc') class Param(object): - __slots__ = ('name', 'type', 'required', 'default', 'multiple', '_options', 'suggestions') - - def __init__(self, name, paramtype, required=False, default=None, multiple=False, options=[], - suggestions=None): - self.name = name - if not isinstance(paramtype, ParamType): - raise Exception('Invalid type %s for parameter %s' % (paramtype, name)) - self.type = paramtype - self.required = bool(required) - self.default = default - self.multiple = bool(multiple) - self._options = options - self.suggestions = suggestions - - @property - def options(self): - options = list(set(getattr(self, '_options', []) + getattr(self.type, 'options', []))) - options.sort(key=str) - return options - - def toJSON(self): - jsonVal = { - 'name': self.name, - 'type': self.type.name, - } - if self.required: - jsonVal['required'] = True - if self.default is not None: - jsonVal['default'] = self.default - if self.multiple: - jsonVal['multiple'] = True - if self.options: - jsonVal['options'] = self.options - if self.suggestions: - jsonVal['suggestions'] = self.suggestions - return jsonVal - - def validateValue(self, value, func): - # if value isn't specified and there's a default then the default will be used, - # we don't need to validate the default value because we trust that it is valid - if value is None and self.default is not None: - return True - - # None is ok for optional params - if not self.required and value is None: - return True - - # parameter is restricted to a defined set of values, but value is not in it - if self.options and value not in self.options: - raise InputParameterError( - 'Invalid option specified for function "{func}" parameter "{param}": {value}'.format( - func=func, param=self.name, value=repr(value))) - - if not self.type.isValid(value): - raise InputParameterError( - 'Invalid "{type}" value specified for function "{func}" parameter "{param}": {value}'.format( - type=self.type.name, func=func, param=self.name, value=repr(value))) - - return True + __slots__ = ('name', 'type', 'required', 'default', 'multiple', '_options', 'suggestions') + + def __init__(self, name, paramtype, required=False, default=None, multiple=False, options=None, suggestions=None): + self.name = name + if not isinstance(paramtype, ParamType): + raise Exception('Invalid type %s for parameter %s' % (paramtype, name)) + self.type = paramtype + self.required = bool(required) + self.default = default + self.multiple = bool(multiple) + if options is None: + options = [] + self._options = options + self.suggestions = suggestions + + @property + def options(self): + options = list(set(getattr(self, '_options', []) + getattr(self.type, 'options', []))) + options.sort(key=str) + return options + + def toJSON(self): + jsonVal = { + 'name': self.name, + 'type': self.type.name, + } + if self.required: + jsonVal['required'] = True + if self.default is not None: + jsonVal['default'] = self.default + if self.multiple: + jsonVal['multiple'] = True + if self.options: + 
jsonVal['options'] = self.options + if self.suggestions: + jsonVal['suggestions'] = self.suggestions + return jsonVal + + def validateValue(self, value, func): + # if value isn't specified and there's a default then the default will be used, + # we don't need to validate the default value because we trust that it is valid + if value is None and self.default is not None: + return value + + # None is ok for optional params + if not self.required and value is None: + return value + + # parameter is restricted to a defined set of values, but value is not in it + if self.options and value not in self.options: + raise InputParameterError( + 'Invalid option specified for function "{func}" parameter "{param}": {value}'.format( + func=func, param=self.name, value=repr(value))) + + try: + return self.type.isValid(value) + except Exception: + raise InputParameterError( + 'Invalid "{type}" value specified for function "{func}" parameter "{param}": {value}'.format( + type=self.type.name, func=func, param=self.name, value=repr(value))) def validateParams(func, params, args, kwargs): - valid_args = [] + valid_args = [] + valid_kwargs = {} - if len(params) == 0 or params[len(params)-1].multiple is False: + # total number of args + kwargs might be larger than number of params if + # the last param allows to be specified multiple times if len(args) + len(kwargs) > len(params): - raise InputParameterError( - 'Too many parameters specified for function "{func}"'.format(func=func)) - - for i in range(len(params)): - if len(args) <= i: - # requirement is satisfied from "kwargs" - value = kwargs.get(params[i].name, None) - if value is None: - if params[i].required: - # required parameter is missing - raise InputParameterError( - 'Missing required parameter "{param}" for function "{func}"'.format( - param=params[i].name, func=func)) - else: - # got multiple values for keyword argument - if params[i].name in valid_args: + if not params[-1].multiple: raise InputParameterError( - 'Keyword parameter "{param}" specified multiple times for function "{func}"'.format( - param=params[i].name, func=func)) - else: - # requirement is satisfied from "args" - value = args[i] - - params[i].validateValue(value, func) - valid_args.append(params[i].name) + 'Too many parameters specified for function "{func}"'.format(func=func)) - return True + # if args has more values than params and the last param allows multiple values, + # then we're going to validate all paramas from the args value list + args_params = params + kwargs_params = [] + else: + # take the first len(args) params and use them to validate the args values, + # use the remaining params to validate the kwargs values + args_params = params[:len(args)] + kwargs_params = params[len(args):] + + # validate the args + for (i, arg) in enumerate(args): + if i >= len(args_params): + # last parameter must be allowing multiple, + # so we're using it to validate this arg + param = args_params[-1] + else: + param = args_params[i] + + valid_args.append(param.validateValue(arg, func)) + + # validate the kwargs + for param in kwargs_params: + value = kwargs.get(param.name, None) + if value is None: + if param.required: + raise InputParameterError( + 'Missing required parameter "{param}" for function "{func}"' + .format(param=param.name, func=func)) + continue + + valid_kwargs[param.name] = param.validateValue(value, func) + + if len(kwargs) > len(valid_kwargs): + unexpected_keys = [] + for name in kwargs.keys(): + if name not in valid_kwargs: + unexpected_keys.append(name) + raise 
InputParameterError( + 'Unexpected key word arguments: {keys}'.format( + keys=', '.join( + key + for key in kwargs.keys() + if key not in valid_kwargs.keys() + ))) + + return (valid_args, valid_kwargs) diff --git a/webapp/graphite/functions/safe.py b/webapp/graphite/functions/safe.py index 2fb2b0063..f8399329f 100644 --- a/webapp/graphite/functions/safe.py +++ b/webapp/graphite/functions/safe.py @@ -6,27 +6,27 @@ def safeSum(values): - safeValues = [v for v in values if v is not None] - if safeValues: - return sum(safeValues) + safeValues = [v for v in values if v is not None] + if safeValues: + return sum(safeValues) def safeDiff(values): - safeValues = [v for v in values if v is not None] - if safeValues: - values = list(map(lambda x: x*-1, safeValues[1:])) - values.insert(0, safeValues[0]) - return sum(values) + safeValues = [v for v in values if v is not None] + if safeValues: + values = list(map(lambda x: x*-1, safeValues[1:])) + values.insert(0, safeValues[0]) + return sum(values) def safeLen(values): - return len([v for v in values if v is not None]) + return len([v for v in values if v is not None]) def safeDiv(a, b): - if a is None: return None - if b in (0,None): return None - return a / b + if a is None: return None + if b in (0,None): return None + return a / b def safeExp(a): @@ -37,90 +37,93 @@ def safeExp(a): def safePow(a, b): - if a is None: return None - if b is None: return None - try: - result = math.pow(a, b) - except (ValueError, OverflowError): - return None - return result + if a is None or b is None: + return None + try: + result = math.pow(a, b) + except (ValueError, OverflowError): + return None + return result def safeMul(*factors): - if None in factors: - return None + if None in factors: + return None - factors = [float(x) for x in factors] - product = reduce(lambda x,y: x*y, factors) - return product + factors = [float(x) for x in factors] + product = reduce(lambda x,y: x*y, factors) + return product def safeSubtract(a,b): - if a is None or b is None: return None + if a is None or b is None: + return None return float(a) - float(b) def safeAvg(values): - safeValues = [v for v in values if v is not None] - if safeValues: - return sum(safeValues) / len(safeValues) + safeValues = [v for v in values if v is not None] + if safeValues: + return sum(safeValues) / len(safeValues) def safeAvgZero(values): - if values: - return sum([0 if v is None else v for v in values]) / len(values) + if values: + return sum([0 if v is None else v for v in values]) / len(values) def safeMedian(values): - safeValues = [v for v in values if v is not None] - if safeValues: - sortedVals = sorted(safeValues) - mid = len(sortedVals) // 2 - if len(sortedVals) % 2 == 0: - return float(sortedVals[mid-1] + sortedVals[mid]) / 2 - else: - return sortedVals[mid] + safeValues = [v for v in values if v is not None] + if safeValues: + sortedVals = sorted(safeValues) + mid = len(sortedVals) // 2 + if len(sortedVals) % 2 == 0: + return float(sortedVals[mid-1] + sortedVals[mid]) / 2 + else: + return sortedVals[mid] def safeStdDev(a): - sm = safeSum(a) - ln = safeLen(a) - avg = safeDiv(sm,ln) - if avg is None: return None - sum = 0 - safeValues = [v for v in a if v is not None] - for val in safeValues: - sum = sum + (val - avg) * (val - avg) - return math.sqrt(sum/ln) + sm = safeSum(a) + ln = safeLen(a) + avg = safeDiv(sm,ln) + if avg is None: + return None + sum = 0 + safeValues = [v for v in a if v is not None] + for val in safeValues: + sum = sum + (val - avg) * (val - avg) + return 
math.sqrt(sum/ln) def safeLast(values): - for v in reversed(values): - if v is not None: return v + for v in reversed(values): + if v is not None: return v def safeMin(values, default=None): - safeValues = [v for v in values if v is not None] - if safeValues: - return min(safeValues) - else: - return default + safeValues = [v for v in values if v is not None] + if safeValues: + return min(safeValues) + else: + return default def safeMax(values, default=None): - safeValues = [v for v in values if v is not None] - if safeValues: - return max(safeValues) - else: - return default + safeValues = [v for v in values if v is not None] + if safeValues: + return max(safeValues) + else: + return default def safeMap(function, values): - safeValues = [v for v in values if v is not None] - if safeValues: - return [function(x) for x in safeValues] + safeValues = [v for v in values if v is not None] + if safeValues: + return [function(x) for x in safeValues] def safeAbs(value): - if value is None: return None - return abs(value) + if value is None: + return None + return abs(value) diff --git a/webapp/graphite/functions/views.py b/webapp/graphite/functions/views.py index 6895dbbde..d4ad31075 100644 --- a/webapp/graphite/functions/views.py +++ b/webapp/graphite/functions/views.py @@ -1,53 +1,59 @@ +import json + from graphite.util import jsonResponse, HttpResponse, HttpError from graphite.functions import SeriesFunctions, SeriesFunction, PieFunctions, PieFunction, functionInfo -def jsonEncoder(obj): - if hasattr(obj, 'toJSON'): - return obj.toJSON() - return obj.__dict__ +class jsonInfinityEncoder(json.JSONEncoder): + def encode(self, o): + return super(jsonInfinityEncoder, self).encode(o).replace('Infinity,', '1e9999,') + + def default(self, o): + if hasattr(o, 'toJSON'): + return o.toJSON() + return o.__dict__ -@jsonResponse(default=jsonEncoder) +@jsonResponse(encoder=jsonInfinityEncoder) def functionList(request, queryParams): - if request.method != 'GET': - return HttpResponse(status=405) - - if queryParams.get('type') == 'pie': - funcs = PieFunctions() - else: - funcs = SeriesFunctions() - - grouped = queryParams.get('grouped', '').lower() in ['1', 'true'] - group = queryParams.get('group') - result = {} - - for (name, func) in funcs.items(): - info = functionInfo(name, func) - if group is not None and group != info['group']: - continue - - if grouped: - if info['group'] not in result: - result[info['group']] = {} - result[info['group']][name] = info + if request.method != 'GET': + return HttpResponse(status=405) + + if queryParams.get('type') == 'pie': + funcs = PieFunctions() else: - result[name] = info + funcs = SeriesFunctions() - return result + grouped = queryParams.get('grouped', '').lower() in ['1', 'true'] + group = queryParams.get('group') + result = {} + for (name, func) in funcs.items(): + info = functionInfo(name, func) + if group is not None and group != info['group']: + continue -@jsonResponse(default=jsonEncoder) -def functionDetails(request, queryParams, name): - if request.method != 'GET': - return HttpResponse(status=405) + if grouped: + if info['group'] not in result: + result[info['group']] = {} + result[info['group']][name] = info + else: + result[name] = info - try: - if queryParams.get('type') == 'pie': - func = PieFunction(name) - else: - func = SeriesFunction(name) - except KeyError: - raise HttpError('Function not found: %s' % name, status=404) + return result - return functionInfo(name, func) + +@jsonResponse(encoder=jsonInfinityEncoder) +def functionDetails(request, 
queryParams, name): + if request.method != 'GET': + return HttpResponse(status=405) + + try: + if queryParams.get('type') == 'pie': + func = PieFunction(name) + else: + func = SeriesFunction(name) + except KeyError: + raise HttpError('Function not found: %s' % name, status=404) + + return functionInfo(name, func) diff --git a/webapp/graphite/intervals.py b/webapp/graphite/intervals.py index d45ddee22..42d585efb 100644 --- a/webapp/graphite/intervals.py +++ b/webapp/graphite/intervals.py @@ -3,153 +3,151 @@ class IntervalSet: - __slots__ = ('intervals', 'size') - def __init__(self, intervals, disjoint=False): - self.intervals = intervals + def __init__(self, intervals, disjoint=False): + self.intervals = intervals - if not disjoint: - self.intervals = union_overlapping(self.intervals) + if not disjoint: + self.intervals = union_overlapping(self.intervals) - self.size = sum(i.size for i in self.intervals) + self.size = sum(i.size for i in self.intervals) - def __repr__(self): - return repr(self.intervals) + def __repr__(self): + return repr(self.intervals) - def __iter__(self): - return iter(self.intervals) + def __iter__(self): + return iter(self.intervals) - def __len__(self): - return len(self.intervals) + def __len__(self): + return len(self.intervals) - def __getitem__(self, i): - return self.intervals[i] + def __getitem__(self, i): + return self.intervals[i] - def __nonzero__(self): - return self.size != 0 + def __nonzero__(self): + return self.size != 0 - def __sub__(self, other): - return self.intersect( other.complement() ) + def __sub__(self, other): + return self.intersect( other.complement() ) - def complement(self): - complementary = [] - cursor = NEGATIVE_INFINITY + def complement(self): + complementary = [] + cursor = NEGATIVE_INFINITY - for interval in self.intervals: - if cursor < interval.start: - complementary.append( Interval(cursor, interval.start) ) - cursor = interval.end + for interval in self.intervals: + if cursor < interval.start: + complementary.append( Interval(cursor, interval.start) ) + cursor = interval.end - if cursor < INFINITY: - complementary.append( Interval(cursor, INFINITY) ) + if cursor < INFINITY: + complementary.append( Interval(cursor, INFINITY) ) - return IntervalSet(complementary, disjoint=True) + return IntervalSet(complementary, disjoint=True) - def intersect(self, other): #XXX The last major bottleneck. Factorial-time hell. - # Then again, this function is entirely unused... - if (not self) or (not other): - return IntervalSet([]) + def intersect(self, other): #XXX The last major bottleneck. Factorial-time hell. + # Then again, this function is entirely unused... 
+ if (not self) or (not other): + return IntervalSet([]) - #earliest = max(self.intervals[0].start, other.intervals[0].start) - #latest = min(self.intervals[-1].end, other.intervals[-1].end) + #earliest = max(self.intervals[0].start, other.intervals[0].start) + #latest = min(self.intervals[-1].end, other.intervals[-1].end) - #mine = [i for i in self.intervals if i.start >= earliest and i.end <= latest] - #theirs = [i for i in other.intervals if i.start >= earliest and i.end <= latest] + #mine = [i for i in self.intervals if i.start >= earliest and i.end <= latest] + #theirs = [i for i in other.intervals if i.start >= earliest and i.end <= latest] - intersections = [x for x in (i.intersect(j) - for i in self.intervals - for j in other.intervals) - if x] + intersections = [x for x in (i.intersect(j) + for i in self.intervals + for j in other.intervals) + if x] - return IntervalSet(intersections, disjoint=True) + return IntervalSet(intersections, disjoint=True) - def intersect_interval(self, interval): - intersections = [x for x in (i.intersect(interval) - for i in self.intervals) - if x] - return IntervalSet(intersections, disjoint=True) + def intersect_interval(self, interval): + intersections = [x for x in (i.intersect(interval) + for i in self.intervals) + if x] + return IntervalSet(intersections, disjoint=True) - def union(self, other): - return IntervalSet( sorted(self.intervals + other.intervals) ) + def union(self, other): + return IntervalSet( sorted(self.intervals + other.intervals) ) class Interval: - __slots__ = ('start', 'end', 'tuple', 'size') - def __init__(self, start, end): - if end - start < 0: - raise ValueError("Invalid interval start=%s end=%s" % (start, end)) + def __init__(self, start, end): + if end - start < 0: + raise ValueError("Invalid interval start=%s end=%s" % (start, end)) - self.start = start - self.end = end - self.tuple = (start, end) - self.size = self.end - self.start + self.start = start + self.end = end + self.tuple = (start, end) + self.size = self.end - self.start - def __eq__(self, other): - return self.tuple == other.tuple + def __eq__(self, other): + return self.tuple == other.tuple - def __ne__(self, other): - return self.tuple != other.tuple + def __ne__(self, other): + return self.tuple != other.tuple - def __hash__(self): - return hash( self.tuple ) + def __hash__(self): + return hash( self.tuple ) - def __lt__(self, other): - return self.start < other.start + def __lt__(self, other): + return self.start < other.start - def __le__(self, other): - return self.start <= other.start + def __le__(self, other): + return self.start <= other.start - def __gt__(self, other): - return self.start > other.start + def __gt__(self, other): + return self.start > other.start - def __ge__(self, other): - return self.start >= other.start + def __ge__(self, other): + return self.start >= other.start - def __cmp__(self, other): - return (self.start > other.start) - (self.start < other.start) + def __cmp__(self, other): + return (self.start > other.start) - (self.start < other.start) - def __len__(self): - raise TypeError("len() doesn't support infinite values, use the 'size' attribute instead") + def __len__(self): + raise TypeError("len() doesn't support infinite values, use the 'size' attribute instead") - def __nonzero__(self): # Python 2 - return self.size != 0 + def __nonzero__(self): # Python 2 + return self.size != 0 - def __bool__(self): # Python 3 - return self.size != 0 + def __bool__(self): # Python 3 + return self.size != 0 - def __repr__(self): - return '<Interval: %s>' % str(self.tuple) + def __repr__(self): + return '<Interval: %s>' % str(self.tuple) - def intersect(self, other): - start = max(self.start, other.start) - end = min(self.end, other.end) + def intersect(self, other): + start = max(self.start, other.start) + end = min(self.end, other.end) - if end > start: - return Interval(start, end) + if end > start: + return Interval(start, end) - def overlaps(self, other): - earlier = self if self.start <= other.start else other - later = self if earlier is other else other - return earlier.end >= later.start + def overlaps(self, other): + earlier = self if self.start <= other.start else other + later = self if earlier is other else other + return earlier.end >= later.start - def union(self, other): - if not self.overlaps(other): - raise TypeError("Union of disjoint intervals is not an interval") + def union(self, other): + if not self.overlaps(other): + raise TypeError("Union of disjoint intervals is not an interval") - start = min(self.start, other.start) - end = max(self.end, other.end) - return Interval(start, end) + start = min(self.start, other.start) + end = max(self.end, other.end) + return Interval(start, end) def union_overlapping(intervals): - """Union any overlapping intervals in the given set.""" - disjoint_intervals = [] + """Union any overlapping intervals in the given set.""" + disjoint_intervals = [] - for interval in intervals: - if disjoint_intervals and disjoint_intervals[-1].overlaps(interval): - disjoint_intervals[-1] = disjoint_intervals[-1].union(interval) - else: - disjoint_intervals.append(interval) + for interval in intervals: + if disjoint_intervals and disjoint_intervals[-1].overlaps(interval): + disjoint_intervals[-1] = disjoint_intervals[-1].union(interval) + else: + disjoint_intervals.append(interval) - return disjoint_intervals + return disjoint_intervals
diff --git a/webapp/graphite/local_settings.py.example b/webapp/graphite/local_settings.py.example index ac4ee53ea..42fcc8a87 100644 --- a/webapp/graphite/local_settings.py.example +++ b/webapp/graphite/local_settings.py.example @@ -36,6 +36,7 @@ #LOG_ROTATION_COUNT = 1 #LOG_RENDERING_PERFORMANCE = True #LOG_CACHE_PERFORMANCE = True +#LOG_INFO_PERFORMANCE = False # Filenames for log output, set to '-' to log to stderr #LOG_FILE_INFO = 'info.log' @@ -94,6 +95,9 @@ DEFAULT_XFILES_FACTOR = 0 #FIND_TIMEOUT = 3.0 # Timeout for metric find requests #FETCH_TIMEOUT = 3.0 # Timeout to fetch series data +# Allow UTF-8 metrics' names (can cause performance issues) +#UTF8_METRICS = False + ##################################### # Filesystem Paths # ##################################### @@ -334,7 +338,10 @@ DEFAULT_XFILES_FACTOR = 0 #CARBONLINK_HOSTS = ["127.0.0.1:7002:a", "127.0.0.1:7102:b", "127.0.0.1:7202:c"] #CARBONLINK_TIMEOUT = 1.0 #CARBONLINK_RETRY_DELAY = 15 # Seconds to blacklist a failed remote server -# +# Set pickle protocol to use for Carbonlink requests, +# (default of -1 is HIGHEST_AVAILABLE for your Python version) +# see more: https://docs.python.org/3/library/pickle.html#data-stream-format +#CARBONLINK_PICKLE_PROTOCOL = -1 # Type of metric hashing function. # The default `carbon_ch` is Graphite's traditional consistent-hashing implementation.
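All of the new knobs in the local_settings.py.example hunk above land commented out, so none of them takes effect until a deployment opts in. As a minimal, illustrative local_settings.py sketch (not part of the patch; the values are examples only, and the protocol pin assumes a Python 3 webapp talking to a Python 2 carbon-cache):

    LOG_INFO_PERFORMANCE = True       # gate for the info/debug logger (see the logger.py hunk that follows)
    UTF8_METRICS = True               # allow UTF-8 metric names (can cause performance issues)
    CARBONLINK_PICKLE_PROTOCOL = 2    # highest pickle protocol a Python 2 carbon-cache can read
    CARBONLINK_HOSTS = ["127.0.0.1:7002:a", "127.0.0.1:7102:b"]
    CARBONLINK_TIMEOUT = 1.0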
diff --git a/webapp/graphite/logger.py b/webapp/graphite/logger.py index c75f3802c..10bdcd2f1 100644 --- a/webapp/graphite/logger.py +++ b/webapp/graphite/logger.py @@ -21,76 +21,76 @@ class GraphiteLogger: - def __init__(self): - self.infoLogger = self._config_logger( - settings.LOG_FILE_INFO, - 'info', - True, - level = logging.DEBUG if settings.DEBUG else logging.INFO, - ) - self.exceptionLogger = self._config_logger( - settings.LOG_FILE_EXCEPTION, - 'exception', - True, - ) - self.cacheLogger = self._config_logger( - settings.LOG_FILE_CACHE, - 'cache', - settings.LOG_CACHE_PERFORMANCE, - level = logging.DEBUG if settings.DEBUG else logging.INFO, - ) - self.renderingLogger = self._config_logger( - settings.LOG_FILE_RENDERING, - 'rendering', - settings.LOG_RENDERING_PERFORMANCE, - level = logging.DEBUG if settings.DEBUG else logging.INFO, - ) - - @staticmethod - def _config_logger(log_file_name, name, activate, - level=None, when='midnight', - backupCount=settings.LOG_ROTATION_COUNT): - logger = logging.getLogger(name) - if level is not None: - logger.setLevel(level) - if activate: # if want to log this one - if log_file_name == '-': - formatter = logging.Formatter( - fmt='[%(asctime)s.%(msecs)03d] %(name)s %(levelname)s %(message)s', - datefmt='%d/%b/%Y %H:%M:%S') - handler = StreamHandler() + def __init__(self): + self.infoLogger = self._config_logger( + settings.LOG_FILE_INFO, + 'info', + settings.LOG_INFO_PERFORMANCE, + level = logging.DEBUG if settings.DEBUG else logging.INFO, + ) + self.exceptionLogger = self._config_logger( + settings.LOG_FILE_EXCEPTION, + 'exception', + True, + ) + self.cacheLogger = self._config_logger( + settings.LOG_FILE_CACHE, + 'cache', + settings.LOG_CACHE_PERFORMANCE, + level = logging.DEBUG if settings.DEBUG else logging.INFO, + ) + self.renderingLogger = self._config_logger( + settings.LOG_FILE_RENDERING, + 'rendering', + settings.LOG_RENDERING_PERFORMANCE, + level = logging.DEBUG if settings.DEBUG else logging.INFO, + ) + + @staticmethod + def _config_logger(log_file_name, name, activate, + level=None, when='midnight', + backupCount=settings.LOG_ROTATION_COUNT): + logger = logging.getLogger(name) + if level is not None: + logger.setLevel(level) + if activate: # if want to log this one + if log_file_name == '-': + formatter = logging.Formatter( + fmt='[%(asctime)s.%(msecs)03d] %(name)s %(levelname)s %(message)s', + datefmt='%d/%b/%Y %H:%M:%S') + handler = StreamHandler() + else: + formatter = logging.Formatter( + fmt='%(asctime)s.%(msecs)03d :: %(message)s', + datefmt='%Y-%m-%d,%H:%M:%S') + log_file = os.path.join(settings.LOG_DIR, log_file_name) + if settings.LOG_ROTATION: # if we want to rotate logs + handler = Rotater(log_file, when=when, backupCount=backupCount) + else: # let someone else, e.g. logrotate, rotate the logs + handler = FileHandler(log_file) + handler.setFormatter(formatter) + logger.addHandler(handler) else: - formatter = logging.Formatter( - fmt='%(asctime)s.%(msecs)03d :: %(message)s', - datefmt='%Y-%m-%d,%H:%M:%S') - log_file = os.path.join(settings.LOG_DIR, log_file_name) - if settings.LOG_ROTATION: # if we want to rotate logs - handler = Rotater(log_file, when=when, backupCount=backupCount) - else: # let someone else, e.g. 
logrotate, rotate the logs - handler = FileHandler(log_file) - handler.setFormatter(formatter) - logger.addHandler(handler) - else: - logger.addHandler(NullHandler()) - return logger - - def info(self,msg,*args,**kwargs): - return self.infoLogger.info(msg,*args,**kwargs) - - def debug(self,msg,*args,**kwargs): - return self.infoLogger.debug(msg,*args,**kwargs) - - def warning(self,msg,*args,**kwargs): - return self.infoLogger.warn(msg,*args,**kwargs) - - def exception(self,msg="Exception Caught",**kwargs): - return self.exceptionLogger.exception(msg,**kwargs) - - def cache(self,msg,*args,**kwargs): - return self.cacheLogger.info(msg,*args,**kwargs) - - def rendering(self,msg,*args,**kwargs): - return self.renderingLogger.info(msg,*args,**kwargs) + logger.addHandler(NullHandler()) + return logger + + def info(self,msg,*args,**kwargs): + return self.infoLogger.info(msg,*args,**kwargs) + + def debug(self,msg,*args,**kwargs): + return self.infoLogger.debug(msg,*args,**kwargs) + + def warning(self,msg,*args,**kwargs): + return self.infoLogger.warn(msg,*args,**kwargs) + + def exception(self,msg="Exception Caught",**kwargs): + return self.exceptionLogger.exception(msg,**kwargs) + + def cache(self,msg,*args,**kwargs): + return self.cacheLogger.info(msg,*args,**kwargs) + + def rendering(self,msg,*args,**kwargs): + return self.renderingLogger.info(msg,*args,**kwargs) log = GraphiteLogger() # import-shared logger instance diff --git a/webapp/graphite/metrics/views.py b/webapp/graphite/metrics/views.py index f7a1a42f1..1b253fd89 100644 --- a/webapp/graphite/metrics/views.py +++ b/webapp/graphite/metrics/views.py @@ -132,9 +132,14 @@ def find_view(request): automatic_variants = queryParamAsInt(queryParams, 'automatic_variants', 0) try: - query = str(queryParams['query']) + if type(queryParams['query']) is unicode: + query = queryParams['query'].encode('utf-8') + else: + query = str(queryParams['query']) except KeyError: raise InputParameterError('Missing required parameter \'query\'') + except NameError: + query = str(queryParams['query']) if query == '': raise InputParameterError('Required parameter \'query\' is empty') diff --git a/webapp/graphite/node.py b/webapp/graphite/node.py index d10609676..e3f055675 100644 --- a/webapp/graphite/node.py +++ b/webapp/graphite/node.py @@ -1,42 +1,42 @@ class Node(object): - __slots__ = ('name', 'path', 'local', 'is_leaf') + __slots__ = ('name', 'path', 'local', 'is_leaf') - def __init__(self, path): - self.path = path - self.name = path.split('.')[-1] - self.local = True - self.is_leaf = False + def __init__(self, path): + self.path = path + self.name = path.split('.')[-1] + self.local = True + self.is_leaf = False - def __repr__(self): - return '<%s[%x]: %s>' % (self.__class__.__name__, id(self), self.path) + def __repr__(self): + return '<%s[%x]: %s>' % (self.__class__.__name__, id(self), self.path) class BranchNode(Node): - pass + pass class LeafNode(Node): - __slots__ = ('reader', ) + __slots__ = ('reader', ) - def __init__(self, path, reader): - Node.__init__(self, path) - self.reader = reader - self.is_leaf = True + def __init__(self, path, reader): + Node.__init__(self, path) + self.reader = reader + self.is_leaf = True - def fetch(self, startTime, endTime, now=None, requestContext=None): - try: - result = self.reader.fetch(startTime, endTime, now, requestContext) - except TypeError: - # Support for legacy 3rd party, readers. 
- result = self.reader.fetch(startTime, endTime) + def fetch(self, startTime, endTime, now=None, requestContext=None): + try: + result = self.reader.fetch(startTime, endTime, now, requestContext) + except TypeError: + # Support for legacy 3rd party, readers. + result = self.reader.fetch(startTime, endTime) - return result + return result - @property - def intervals(self): - return self.reader.get_intervals() + @property + def intervals(self): + return self.reader.get_intervals() - def __repr__(self): - return '<LeafNode[%x]: %s (%s)>' % (id(self), self.path, self.reader) + def __repr__(self): + return '<LeafNode[%x]: %s (%s)>' % (id(self), self.path, self.reader)
diff --git a/webapp/graphite/readers/remote.py b/webapp/graphite/readers/remote.py index b0cf3ece9..8322c9ca2 100644 --- a/webapp/graphite/readers/remote.py +++ b/webapp/graphite/readers/remote.py @@ -7,87 +7,86 @@ class RemoteReader(BaseReader): - __slots__ = ( - 'finder', - 'metric_path', - 'intervals', - 'bulk_query', - ) - - def __init__(self, finder, node_info, bulk_query=None): - self.finder = finder - self.metric_path = node_info.get('path') or node_info.get('metric_path') - self.intervals = node_info.get('intervals', []) - self.bulk_query = set(bulk_query) if bulk_query else ( - [self.metric_path] if self.metric_path else [] + __slots__ = ( + 'finder', + 'metric_path', + 'intervals', + 'bulk_query', ) - def __repr__(self): - return '<RemoteReader[%x]: %s %s>' % (id(self), self.finder.host, ','.join(self.bulk_query)) - - def get_intervals(self): - return self.intervals - - def fetch(self, startTime, endTime, now=None, requestContext=None): - for series in self.fetch_multi(startTime, endTime, now, requestContext): - if series['name'] == self.metric_path: - return (series['time_info'], series['values']) - - def fetch_multi(self, startTime, endTime, now=None, requestContext=None): - if not self.bulk_query: - return [] - - query_params = [ - ('format', self.finder.params.get('format', 'pickle')), - ('local', self.finder.params.get('local', '1')), - ('noCache', '1'), - ('from', int(startTime)), - ('until', int(endTime)) - ] - - for target in self.bulk_query: - query_params.append(('target', target)) - - if now is not None: - query_params.append(('now', int(now))) - - headers = requestContext.get('forwardHeaders') if requestContext else None - - retries = 1 # start counting at one to make log output and settings more readable - while True: - try: - result = self.finder.request( - '/render/', - fields=query_params, - headers=headers, - timeout=settings.FETCH_TIMEOUT, + def __init__(self, finder, node_info, bulk_query=None): + self.finder = finder + self.metric_path = node_info.get('path') or node_info.get('metric_path') + self.intervals = node_info.get('intervals', []) + self.bulk_query = set(bulk_query) if bulk_query else ( + [self.metric_path] if self.metric_path else [] ) - break - except Exception: - if retries >= settings.MAX_FETCH_RETRIES: - log.exception("Failed after %s attempts! Root cause:\n%s" % - (settings.MAX_FETCH_RETRIES, format_exc())) - raise - else: - log.exception("Got an exception when fetching data! Try: %i of %i. 
Root cause:\n%s" % - (retries, settings.MAX_FETCH_RETRIES, format_exc())) - retries += 1 - - data = self.finder.deserialize(result) - - try: - return [ - { - 'pathExpression': series.get('pathExpression', series['name']), - 'name': series['name'], - 'time_info': (series['start'], series['end'], series['step']), - 'values': series['values'], - } - for series in data - ] - except Exception as err: - self.finder.fail() - log.exception( - "RemoteReader[%s] Invalid render response from %s: %s" % - (self.finder.host, result.url_full, repr(err))) - raise Exception("Invalid render response from %s: %s" % (result.url_full, repr(err))) + + def __repr__(self): + return '<RemoteReader[%x]: %s %s>' % (id(self), self.finder.host, ','.join(self.bulk_query)) + + def get_intervals(self): + return self.intervals + + def fetch(self, startTime, endTime, now=None, requestContext=None): + for series in self.fetch_multi(startTime, endTime, now, requestContext): + if series['name'] == self.metric_path: + return (series['time_info'], series['values']) + + def fetch_multi(self, startTime, endTime, now=None, requestContext=None): + if not self.bulk_query: + return [] + + query_params = [ + ('format', self.finder.params.get('format', 'pickle')), + ('local', self.finder.params.get('local', '1')), + ('noCache', '1'), + ('from', int(startTime)), + ('until', int(endTime)) + ] + + for target in self.bulk_query: + query_params.append(('target', target)) + + if now is not None: + query_params.append(('now', int(now))) + + headers = requestContext.get('forwardHeaders') if requestContext else None + + retries = 1 # start counting at one to make log output and settings more readable + while True: + try: + result = self.finder.request( + '/render/', + fields=query_params, + headers=headers, + timeout=settings.FETCH_TIMEOUT, + ) + break + except Exception: + if retries >= settings.MAX_FETCH_RETRIES: + log.exception("Failed after %s attempts! Root cause:\n%s" % + (settings.MAX_FETCH_RETRIES, format_exc())) + raise + else: + log.exception("Got an exception when fetching data! Try: %i of %i. 
Root cause:\n%s" % + (retries, settings.MAX_FETCH_RETRIES, format_exc())) + retries += 1 + + data = self.finder.deserialize(result) + + try: + return [ + { + 'pathExpression': series.get('pathExpression', series['name']), + 'name': series['name'], + 'time_info': (series['start'], series['end'], series['step']), + 'values': series['values'], + } + for series in data + ] + except Exception as err: + self.finder.fail() + log.exception("RemoteReader[%s] Invalid render response from %s: %s" % + (self.finder.host, result.url_full, repr(err))) + raise Exception("Invalid render response from %s: %s" % (result.url_full, repr(err))) diff --git a/webapp/graphite/render/attime.py b/webapp/graphite/render/attime.py index 7ddffb602..fcc44e70f 100644 --- a/webapp/graphite/render/attime.py +++ b/webapp/graphite/render/attime.py @@ -28,167 +28,173 @@ def parseATTime(s, tzinfo=None, now=None): - if tzinfo is None: - tzinfo = pytz.timezone(settings.TIME_ZONE) - if isinstance(s, datetimetype): - if s.tzinfo: - return s.astimezone(tzinfo) - return tzinfo.localize(s) - - s = s.strip().lower().replace('_','').replace(',','').replace(' ','') - if s.isdigit(): - if len(s) == 8 and int(s[:4]) > 1900 and int(s[4:6]) < 13 and int(s[6:]) < 32: - pass #Fall back because its not a timestamp, its YYYYMMDD form + if tzinfo is None: + tzinfo = pytz.timezone(settings.TIME_ZONE) + if isinstance(s, datetimetype): + if s.tzinfo: + return s.astimezone(tzinfo) + return tzinfo.localize(s) + + s = s.strip().lower().replace('_','').replace(',','').replace(' ','') + if s.isdigit(): + if len(s) == 8 and int(s[:4]) > 1900 and int(s[4:6]) < 13 and int(s[6:]) < 32: + pass # Fall back because it's not a timestamp, it's the YYYYMMDD form + else: + return datetime.fromtimestamp(int(s),tzinfo) + if '+' in s: + ref,offset = s.split('+',1) + offset = '+' + offset + elif '-' in s: + ref,offset = s.split('-',1) + offset = '-' + offset else: - return datetime.fromtimestamp(int(s),tzinfo) - if '+' in s: - ref,offset = s.split('+',1) - offset = '+' + offset - elif '-' in s: - ref,offset = s.split('-',1) - offset = '-' + offset - else: - ref,offset = s,'' + ref,offset = s,'' - return tzinfo.normalize(parseTimeReference(ref, tzinfo, now) + parseTimeOffset(offset)) + return tzinfo.normalize(parseTimeReference(ref, tzinfo, now) + parseTimeOffset(offset)) def parseTimeReference(ref, tzinfo=None, now=None): - if tzinfo is None: - tzinfo = pytz.timezone(settings.TIME_ZONE) - if isinstance(ref, datetimetype): - if ref.tzinfo: - return ref.astimezone(tzinfo) - return tzinfo.localize(ref) - - if now is None: - now = datetime.now(tzinfo) - else: - now = parseATTime(now, tzinfo) - - if not ref or ref == 'now': return now - - rawRef = ref - - # Time-of-day reference - i = ref.find(':') - hour,minute = 0,0 - if 0 < i < 3: - hour = int( ref[:i] ) - minute = int( ref[i+1:i+3] ) - ref = ref[i+3:] - if ref[:2] == 'am': - ref = ref[2:] - elif ref[:2] == 'pm': - hour = (hour + 12) % 24 - ref = ref[2:] - - # Xam or XXam - i = ref.find('am') - if 0 < i < 3: - hour = int( ref[:i] ) - ref = ref[i+2:] - - # Xpm or XXpm - i = ref.find('pm') - if 0 < i < 3: - hour = (int( ref[:i] ) + 12) % 24 - ref = ref[i+2:] - - if ref.startswith('noon'): - hour,minute = 12,0 - ref = ref[4:] - elif ref.startswith('midnight'): - hour,minute = 0,0 - ref = ref[8:] - elif ref.startswith('teatime'): - hour,minute = 16,0 - ref = ref[7:] - - refDate = now.replace(hour=hour,minute=minute,second=0,microsecond=0,tzinfo=None) - - # Day reference - if ref in ('yesterday','today','tomorrow'): # yesterday, 
today, tomorrow - if ref == 'yesterday': - refDate -= timedelta(days=1) - elif ref == 'tomorrow': - refDate += timedelta(days=1) - - elif ref.count('/') == 2: # MM/DD/YY[YY] - m,d,y = map(int,ref.split('/')) - if y < 1900: y += 1900 - if y < 1970: y += 100 - refDate = datetime(year=y,month=m,day=d,hour=hour,minute=minute) - - elif len(ref) == 8 and ref.isdigit(): # YYYYMMDD - refDate = datetime(year=int(ref[:4]),month=int(ref[4:6]),day=int(ref[6:8]),hour=hour,minute=minute) - - elif ref[:3] in months: # MonthName DayOfMonth - d = None - if ref[-2:].isdigit(): - d = int(ref[-2:]) - elif ref[-1:].isdigit(): - d = int(ref[-1:]) + if tzinfo is None: + tzinfo = pytz.timezone(settings.TIME_ZONE) + if isinstance(ref, datetimetype): + if ref.tzinfo: + return ref.astimezone(tzinfo) + return tzinfo.localize(ref) + + if now is None: + now = datetime.now(tzinfo) else: - raise Exception("Day of month required after month name") - refDate = datetime(year=refDate.year,month=months.index(ref[:3]) + 1,day=d,hour=hour,minute=minute) + now = parseATTime(now, tzinfo) - elif ref[:3] in weekdays: # DayOfWeek (Monday, etc) - todayDayName = refDate.strftime("%a").lower()[:3] - today = weekdays.index( todayDayName ) - twoWeeks = weekdays * 2 - dayOffset = today - twoWeeks.index(ref[:3]) - if dayOffset < 0: dayOffset += 7 - refDate -= timedelta(days=dayOffset) + if not ref or ref == 'now': + return now - elif ref: - raise ValueError("Unknown day reference: %s" % rawRef) + rawRef = ref - return tzinfo.localize(refDate) + # Time-of-day reference + i = ref.find(':') + hour,minute = 0,0 + if 0 < i < 3: + hour = int( ref[:i] ) + minute = int( ref[i+1:i+3] ) + ref = ref[i+3:] + if ref[:2] == 'am': + ref = ref[2:] + elif ref[:2] == 'pm': + hour = (hour + 12) % 24 + ref = ref[2:] + + # Xam or XXam + i = ref.find('am') + if 0 < i < 3: + hour = int( ref[:i] ) + ref = ref[i+2:] + + # Xpm or XXpm + i = ref.find('pm') + if 0 < i < 3: + hour = (int( ref[:i] ) + 12) % 24 + ref = ref[i+2:] + + if ref.startswith('noon'): + hour,minute = 12,0 + ref = ref[4:] + elif ref.startswith('midnight'): + hour,minute = 0,0 + ref = ref[8:] + elif ref.startswith('teatime'): + hour,minute = 16,0 + ref = ref[7:] + + refDate = now.replace(hour=hour,minute=minute,second=0,microsecond=0,tzinfo=None) + + # Day reference + if ref in ('yesterday','today','tomorrow'): # yesterday, today, tomorrow + if ref == 'yesterday': + refDate -= timedelta(days=1) + elif ref == 'tomorrow': + refDate += timedelta(days=1) + + elif ref.count('/') == 2: # MM/DD/YY[YY] + m,d,y = map(int,ref.split('/')) + if y < 1900: + y += 1900 + if y < 1970: + y += 100 + refDate = datetime(year=y,month=m,day=d,hour=hour,minute=minute) + + elif len(ref) == 8 and ref.isdigit(): # YYYYMMDD + refDate = datetime(year=int(ref[:4]), month=int(ref[4:6]), day=int(ref[6:8]), hour=hour, minute=minute) + + elif ref[:3] in months: # MonthName DayOfMonth + d = None + if ref[-2:].isdigit(): + d = int(ref[-2:]) + elif ref[-1:].isdigit(): + d = int(ref[-1:]) + else: + raise Exception("Day of month required after month name") + refDate = datetime(year=refDate.year, month=months.index(ref[:3]) + 1, day=d, hour=hour, minute=minute) + + elif ref[:3] in weekdays: # DayOfWeek (Monday, etc) + todayDayName = refDate.strftime("%a").lower()[:3] + today = weekdays.index( todayDayName ) + twoWeeks = weekdays * 2 + dayOffset = today - twoWeeks.index(ref[:3]) + if dayOffset < 0: + dayOffset += 7 + refDate -= timedelta(days=dayOffset) + + elif ref: + raise ValueError("Unknown day reference: %s" % rawRef) + + return 
tzinfo.localize(refDate) def parseTimeOffset(offset): - if not offset: - return timedelta() - - t = timedelta() - - if offset[0].isdigit(): - sign = 1 - else: - try: - sign = { '+' : 1, '-' : -1 }[offset[0]] - except KeyError: - raise KeyError('Invalid offset: %s' % offset) - offset = offset[1:] - - while offset: - i = 1 - while offset[:i].isdigit() and i <= len(offset): i += 1 - num = int(offset[:i-1]) - offset = offset[i-1:] - i = 1 - while offset[:i].isalpha() and i <= len(offset): i += 1 - unit = offset[:i-1] - offset = offset[i-1:] - unitString = getUnitString(unit) - if unitString == MONTHS_STRING: - unitString = DAYS_STRING - num = num * 30 - if unitString == YEARS_STRING: - unitString = DAYS_STRING - num = num * 365 - t += timedelta(**{ unitString : sign * num}) - - return t + if not offset: + return timedelta() + + t = timedelta() + + if offset[0].isdigit(): + sign = 1 + else: + try: + sign = { '+' : 1, '-' : -1 }[offset[0]] + except KeyError: + raise KeyError('Invalid offset: %s' % offset) + offset = offset[1:] + + while offset: + i = 1 + while offset[:i].isdigit() and i <= len(offset): + i += 1 + num = int(offset[:i-1]) + offset = offset[i-1:] + i = 1 + while offset[:i].isalpha() and i <= len(offset): + i += 1 + unit = offset[:i-1] + offset = offset[i-1:] + unitString = getUnitString(unit) + if unitString == MONTHS_STRING: + unitString = DAYS_STRING + num = num * 30 + if unitString == YEARS_STRING: + unitString = DAYS_STRING + num = num * 365 + t += timedelta(**{ unitString : sign * num}) + + return t def getUnitString(s): - if s.startswith('s'): return SECONDS_STRING - if s.startswith('min'): return MINUTES_STRING - if s.startswith('h'): return HOURS_STRING - if s.startswith('d'): return DAYS_STRING - if s.startswith('w'): return WEEKS_STRING - if s.startswith('mon'): return MONTHS_STRING - if s.startswith('y'): return YEARS_STRING - raise Exception("Invalid offset unit '%s'" % s) + if s.startswith('s'): return SECONDS_STRING + if s.startswith('min'): return MINUTES_STRING + if s.startswith('h'): return HOURS_STRING + if s.startswith('d'): return DAYS_STRING + if s.startswith('w'): return WEEKS_STRING + if s.startswith('mon'): return MONTHS_STRING + if s.startswith('y'): return YEARS_STRING + raise ValueError("Invalid offset unit '%s'" % s) diff --git a/webapp/graphite/render/datalib.py b/webapp/graphite/render/datalib.py index a53f05ef0..c3513bf72 100755 --- a/webapp/graphite/render/datalib.py +++ b/webapp/graphite/render/datalib.py @@ -26,6 +26,17 @@ from graphite.util import timebounds, logtime +try: + from collections import UserDict +except ImportError: + from UserDict import IterableUserDict as UserDict + + +class Tags(UserDict): + def __setitem__(self, key, value): + self.data[key] = str(value) + + class TimeSeries(list): def __init__(self, name, start, end, step, values, consolidate='average', tags=None, xFilesFactor=None, pathExpression=None): list.__init__(self, values) @@ -158,6 +169,19 @@ def datapoints(self): timestamps = range(int(self.start), int(self.end) + 1, int(self.step * self.valuesPerPoint)) return list(zip(self, timestamps)) + @property + def tags(self): + return self.__tags + + @tags.setter + def tags(self, tags): + if isinstance(tags, Tags): + self.__tags = tags + elif isinstance(tags, dict): + self.__tags = Tags(tags) + else: + raise Exception('Invalid tags specified') + # Data retrieval API @logtime diff --git a/webapp/graphite/render/evaluator.py b/webapp/graphite/render/evaluator.py index 9a73a7ae9..748626ed0 100644 --- 
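The datalib.py hunk above introduces a small Tags mapping so that every tag value is normalized to a string on assignment; the new TimeSeries.tags setter accepts either a ready-made Tags instance or a plain dict, which it wraps in Tags, and rejects anything else. A minimal sketch of the behaviour, assuming the graphite webapp is on the import path:

.. code-block:: python

    from graphite.render.datalib import Tags

    tags = Tags({'host': 'web01'})
    tags['shard'] = 7            # __setitem__ coerces the value to str
    assert tags['shard'] == '7'

Item assignments go through the overridden __setitem__ on both Python 2 and 3, so consumers of TimeSeries.tags can rely on uniform string values.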
a/webapp/graphite/render/evaluator.py +++ b/webapp/graphite/render/evaluator.py @@ -27,9 +27,15 @@ def evaluateTarget(requestContext, targets): if isinstance(target, six.string_types): if not target.strip(): continue + target = grammar.parseString(target) - result = evaluateTokens(requestContext, target) + try: + result = evaluateTokens(requestContext, target) + except InputParameterError as e: + e.setTargets(requestContext.get('targets', [])) + e.setSourceIdHeaders(requestContext.get('sourceIdHeaders', {})) + raise # we have to return a list of TimeSeries objects if isinstance(result, TimeSeries): @@ -88,11 +94,7 @@ def evaluateTokens(requestContext, tokens, replacements=None, pipedArg=None): try: func = SeriesFunction(tokens.call.funcname) except KeyError: - msg = 'Received request for unknown function: {func}'.format(func=tokens.call.funcname) - log.warning(msg) - - # even if input validation enforcement is disabled, there's nothing else we can do here - raise InputParameterError(msg) + raise InputParameterError('Received request for unknown function: {func}'.format(func=tokens.call.funcname)) rawArgs = tokens.call.args or [] if pipedArg is not None: @@ -103,18 +105,22 @@ def evaluateTokens(requestContext, tokens, replacements=None, pipedArg=None): for kwarg in tokens.call.kwargs]) def handleInvalidParameters(e): + e.setSourceIdHeaders(requestContext.get('sourceIdHeaders', {})) + e.setTargets(requestContext.get('targets', [])) + e.setFunction(tokens.call.funcname, args, kwargs) + + if settings.ENFORCE_INPUT_VALIDATION: + raise e + if not getattr(handleInvalidParameters, 'alreadyLogged', False): - log.warning(invalidParamLogMsg(requestContext, str(e), tokens.call.funcname, args, kwargs)) + log.warning('%s', str(e)) # only log invalid parameters once setattr(handleInvalidParameters, 'alreadyLogged', True) - if settings.ENFORCE_INPUT_VALIDATION: - raise if hasattr(func, 'params'): try: - validateParams(tokens.call.funcname, func.params, args, kwargs) + (args, kwargs) = validateParams(tokens.call.funcname, func.params, args, kwargs) except InputParameterError as e: handleInvalidParameters(e) @@ -128,37 +134,6 @@ def handleInvalidParameters(e): return evaluateScalarTokens(tokens) -def invalidParamLogMsg(requestContext, exception, func, args, kwargs): - source = '' - - if 'sourceIdHeaders' in requestContext: - headers = list(requestContext['sourceIdHeaders'].keys()) - headers.sort() - for name in headers: - if source: - source += ', ' - source += '{name}: {value}'.format( - name=name, - value=requestContext['sourceIdHeaders'][name]) - - logMsg = 'Received invalid parameters ({msg}): {func} ({args})'.format( - msg=exception, - func=func, - args=', '.join( - argList - for argList in [ - ', '.join(repr(arg) for arg in args), - ', '.join('{k}={v}'.format(k=str(k), v=repr(v)) for k, v in kwargs.items()), - ] if argList - )) - - if not source: - return logMsg - - logMsg += '; source: ({source})'.format(source=source) - return logMsg - - def evaluateScalarTokens(tokens): if tokens.number: if tokens.number.integer: diff --git a/webapp/graphite/render/functions.py b/webapp/graphite/render/functions.py index d478d0e63..18b73ac6d 100644 --- a/webapp/graphite/render/functions.py +++ b/webapp/graphite/render/functions.py @@ -134,6 +134,23 @@ def matchSeries(seriesList1, seriesList2): return izip(sorted(seriesList1, key=lambda a: a.name), sorted(seriesList2, key=lambda a: a.name)) +def trimRecent(seriesList): + # trim the right side of the graph to avoid a dip when only part of the most recent data has entered the system
+ for s in seriesList: + if len(s) > 1: + if (s[-1] is None) and (s[-2] is not None): + for sl in seriesList: + sl[-1] = None + break + for s in seriesList: + if len(s) > 2: + if (s[-2] is None) and (s[-3] is not None): + for sl in seriesList: + sl[-2] = None + break + return seriesList + + def formatPathExpressions(seriesList): # remove duplicates pathExpressions = [] @@ -183,6 +200,10 @@ def aggregate(requestContext, seriesList, func, xFilesFactor=None): (seriesList, start, end, step) = normalize(seriesList) except NormalizeEmptyResultError: return [] + + if settings.RENDER_TRIM_RECENT_IN_AGGREGATE: + seriesList = trimRecent(seriesList) + xFilesFactor = xFilesFactor if xFilesFactor is not None else requestContext.get('xFilesFactor') name = "%sSeries(%s)" % (func, formatPathExpressions(seriesList)) values = ( consolidationFunc(row) if xffValues(row, xFilesFactor) else None for row in izip_longest(*seriesList) ) @@ -205,6 +226,61 @@ def aggregate(requestContext, seriesList, func, xFilesFactor=None): ] +def aggregateSeriesLists(requestContext, seriesListFirstPos, seriesListSecondPos, func, xFilesFactor=None): + """ + Iterates over two lists and aggregates each pair of series using the specified function: + list1[0] with list2[0], list1[1] with list2[1], and so on. + The lists must be the same length. + + The position of each seriesList matters. For example, using the "sum" function, + ``aggregateSeriesLists(list1[0..n], list2[0..n], "sum")`` + computes the sum for each pair + of the lists: ``list1[0] + list2[0], list1[1] + list2[1], ..., list1[n] + list2[n]``. + + Example: + + .. code-block:: none + + &target=aggregateSeriesLists(mining.{carbon,graphite,diamond}.extracted,mining.{carbon,graphite,diamond}.shipped, 'sum') + + The example above would be the same as running :py:func:`aggregate <graphite.render.functions.aggregate>` for each member of the list: + + .. code-block:: none + + ?target=aggregate(mining.carbon.extracted,mining.carbon.shipped, 'sum') + &target=aggregate(mining.graphite.extracted,mining.graphite.shipped, 'sum') + &target=aggregate(mining.diamond.extracted,mining.diamond.shipped, 'sum') + + This function can be used with aggregation functions ``average`` (or ``avg``), ``avg_zero``, + ``median``, ``sum`` (or ``total``), ``min``, ``max``, ``diff``, ``stddev``, ``count``, + ``range`` (or ``rangeOf``), ``multiply`` & ``last`` (or ``current``). 
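Because TimeSeries subclasses list, the effect of the trimRecent helper added at the top of this functions.py hunk can be sketched with plain lists standing in for series (illustrative values; the helper mutates its input in place and returns it):

.. code-block:: python

    from graphite.render.functions import trimRecent

    a = [1, 2, 3, None]  # the newest datapoint has not arrived for this series yet
    b = [4, 5, 6, 7]

    trimRecent([a, b])
    assert b == [4, 5, 6, None]  # the newest slot is nulled in every series, so the aggregate does not dip

When the RENDER_TRIM_RECENT_IN_AGGREGATE setting is enabled, aggregate() applies this trimming to its input before consolidating values.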
+ """ + if len(seriesListFirstPos) != len(seriesListSecondPos): + raise InputParameterError( + "seriesListFirstPos and seriesListSecondPos argument must have equal length") + results = [] + + for i in range(0, len(seriesListFirstPos)): + firstSeries = seriesListFirstPos[i] + secondSeries = seriesListSecondPos[i] + aggregated = aggregate(requestContext, (firstSeries, secondSeries), func, xFilesFactor=xFilesFactor) + if not aggregated: # empty list, no data found + continue + result = aggregated[0] # aggregate() can only return len 1 list + result.name = result.name[:result.name.find('Series(')] + 'Series(%s,%s)' % (firstSeries.name, secondSeries.name) + results.append(result) + return results + + +aggregateSeriesLists.group = 'Combine' +aggregateSeriesLists.params = [ + Param('seriesListFirstPos', ParamTypes.seriesList, required=True), + Param('seriesListSecondPos', ParamTypes.seriesList, required=True), + Param('func', ParamTypes.aggFunc, required=True), + Param('xFilesFactor', ParamTypes.float), +] + + def sumSeries(requestContext, *seriesLists): """ Short form: sum() @@ -235,9 +311,45 @@ def sumSeries(requestContext, *seriesLists): sumSeries.aggregator = True +def sumSeriesLists(requestContext, seriesListFirstPos, seriesListSecondPos): + """ + Iterates over a two lists and subtracts series lists 2 through n from series 1 + list1[0] to list2[0], list1[1] to list2[1] and so on. + The lists will need to be the same length + + Example: + + .. code-block:: none + + &target=sumSeriesLists(mining.{carbon,graphite,diamond}.extracted,mining.{carbon,graphite,diamond}.shipped) + + An example above would be the same as running :py:func:`sumSeries ` for each member of the list: + + .. code-block:: none + + ?target=sumSeries(mining.carbon.extracted,mining.carbon.shipped) + &target=sumSeries(mining.graphite.extracted,mining.graphite.shipped) + &target=sumSeries(mining.diamond.extracted,mining.diamond.shipped) + + This is an alias for :py:func:`aggregateSeriesLists ` with aggregation ``sum``. + """ + return aggregateSeriesLists(requestContext, seriesListFirstPos, seriesListSecondPos, 'sum') + + +sumSeriesLists.group = 'Combine' +sumSeriesLists.params = [ + Param('seriesListFirstPos', ParamTypes.seriesList, required=True), + Param('seriesListSecondPos', ParamTypes.seriesList, required=True), +] +sumSeriesLists.aggregator = True + + def sumSeriesWithWildcards(requestContext, seriesList, *position): #XXX """ - Call sumSeries after inserting wildcards at the given position(s). + Categorizes the provided series in groups by name, by ignoring + ("wildcarding") the given position(s) and calls sumSeries on each group. + Important: the introduction of wildcards only happens *after* retrieving + the input. Example: @@ -266,7 +378,10 @@ def sumSeriesWithWildcards(requestContext, seriesList, *position): #XXX def averageSeriesWithWildcards(requestContext, seriesList, *position): #XXX """ - Call averageSeries after inserting wildcards at the given position(s). + Categorizes the provided series in groups by name, by ignoring + ("wildcarding") the given position(s) and calls averageSeries on each group. + Important: the introduction of wildcards only happens *after* retrieving + the input. Example: @@ -278,7 +393,7 @@ def averageSeriesWithWildcards(requestContext, seriesList, *position): #XXX .. 
code-block:: none - &target=averageSeries(host.*.cpu-user.value)&target=averageSeries(host.*.cpu-system.value) + &target=averageSeries(host.cpu-[0-7].cpu-user.value)&target=averageSeries(host.cpu-[0-7].cpu-system.value) This is an alias for :py:func:`aggregateWithWildcards <graphite.render.functions.aggregateWithWildcards>` with aggregation ``average``. """ @@ -295,7 +410,10 @@ def averageSeriesWithWildcards(requestContext, seriesList, *position): #XXX def multiplySeriesWithWildcards(requestContext, seriesList, *position): #XXX """ - Call multiplySeries after inserting wildcards at the given position(s). + Categorizes the provided series in groups by name, by ignoring + ("wildcarding") the given position(s) and calls multiplySeries on each group. + Important: the introduction of wildcards only happens *after* retrieving + the input. Example: @@ -402,6 +520,40 @@ def diffSeries(requestContext, *seriesLists): diffSeries.aggregator = True +def diffSeriesLists(requestContext, seriesListFirstPos, seriesListSecondPos): + """ + Iterates over two lists and subtracts each series in the second list from the corresponding series in the first list: + list2[0] from list1[0], list2[1] from list1[1], and so on. + The lists must be the same length. + + Example: + + .. code-block:: none + + &target=diffSeriesLists(mining.{carbon,graphite,diamond}.extracted,mining.{carbon,graphite,diamond}.shipped) + + The example above would be the same as running :py:func:`diffSeries <graphite.render.functions.diffSeries>` for each member of the list: + + .. code-block:: none + + ?target=diffSeries(mining.carbon.extracted,mining.carbon.shipped) + &target=diffSeries(mining.graphite.extracted,mining.graphite.shipped) + &target=diffSeries(mining.diamond.extracted,mining.diamond.shipped) + + + This is an alias for :py:func:`aggregateSeriesLists <graphite.render.functions.aggregateSeriesLists>` with aggregation ``diff``. + """ + return aggregateSeriesLists(requestContext, seriesListFirstPos, seriesListSecondPos, 'diff') + + +diffSeriesLists.group = 'Combine' +diffSeriesLists.params = [ + Param('seriesListFirstPos', ParamTypes.seriesList, required=True), + Param('seriesListSecondPos', ParamTypes.seriesList, required=True), +] +diffSeriesLists.aggregator = True + + def averageSeries(requestContext, *seriesLists): """ Short Alias: avg() @@ -871,6 +1023,7 @@ def asPercent(requestContext, seriesList, total=None, *nodes): Param('total', ParamTypes.any), Param('nodes', ParamTypes.nodeOrTag, multiple=True), ] +asPercent.aggregator = True def divideSeriesLists(requestContext, dividendSeriesList, divisorSeriesList): @@ -996,6 +1149,39 @@ def multiplySeries(requestContext, *seriesLists): multiplySeries.aggregator = True +def multiplySeriesLists(requestContext, seriesListFirstPos, seriesListSecondPos): + """ + Iterates over two lists and multiplies each pair of series: + list1[0] with list2[0], list1[1] with list2[1], and so on. + The lists must be the same length. + + Example: + + .. code-block:: none + + &target=multiplySeriesLists(mining.{carbon,graphite,diamond}.extracted,mining.{carbon,graphite,diamond}.shipped) + + The example above would be the same as running :py:func:`multiplySeries <graphite.render.functions.multiplySeries>` for each member of the list: + + .. code-block:: none + + ?target=multiplySeries(mining.carbon.extracted,mining.carbon.shipped) + &target=multiplySeries(mining.graphite.extracted,mining.graphite.shipped) + &target=multiplySeries(mining.diamond.extracted,mining.diamond.shipped) + + This is an alias for :py:func:`aggregateSeriesLists <graphite.render.functions.aggregateSeriesLists>` with aggregation ``multiply``. 
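All of the *SeriesLists aliases funnel into aggregateSeriesLists, which rejects input lists of unequal length before touching any series data. A quick sketch of that guard, assuming the graphite webapp is importable (the placeholder lists below never reach the actual aggregation):

.. code-block:: python

    from graphite.errors import InputParameterError
    from graphite.render.functions import aggregateSeriesLists

    try:
        aggregateSeriesLists({}, [None], [None, None], 'sum')
    except InputParameterError as exc:
        print(exc)  # the two seriesList arguments must have equal length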
+ """ + return aggregateSeriesLists(requestContext, seriesListFirstPos, seriesListSecondPos, 'multiply') + + +multiplySeriesLists.group = 'Combine' +multiplySeriesLists.params = [ + Param('seriesListFirstPos', ParamTypes.seriesList, required=True), + Param('seriesListSecondPos', ParamTypes.seriesList, required=True), +] +multiplySeriesLists.aggregator = True + + def weightedAverage(requestContext, seriesListAvg, seriesListWeight, *nodes): """ Takes a series of average values and a series of weights and @@ -1012,6 +1198,9 @@ def weightedAverage(requestContext, seriesListAvg, seriesListWeight, *nodes): """ sortedSeries={} + if len(seriesListAvg) != len(seriesListWeight): + raise InputParameterError('weightedAverage must receive the same number of series for seriesListAvg and seriesListWeight but received respectively %d and %d' % (len(seriesListAvg), len(seriesListWeight))) + for seriesAvg, seriesWeight in izip_longest(seriesListAvg , seriesListWeight): key = aggKey(seriesAvg, nodes) @@ -1509,7 +1698,7 @@ def powSeries(requestContext, *seriesLists): return [series] -powSeries.group = 'Transform' +powSeries.group = 'Combine' powSeries.params = [ Param('seriesList', ParamTypes.seriesList, required=True, multiple=True), ] @@ -3966,7 +4155,7 @@ def holtWintersConfidenceArea(requestContext, seriesList, delta=3, bootstrapInte ] -def linearRegressionAnalysis(series): +def _linearRegressionAnalysis(series): """ Returns factor and offset of linear regression function by least squares method. """ @@ -4019,7 +4208,7 @@ def linearRegression(requestContext, seriesList, startSourceAt=None, endSourceAt int(time.mktime(sourceContext['startTime'].timetuple())), int(time.mktime(sourceContext['endTime'].timetuple())) ) - forecast = linearRegressionAnalysis(source) + forecast = _linearRegressionAnalysis(source) if forecast is None: continue factor, offset = forecast @@ -4559,7 +4748,7 @@ def transform(v): return seriesList -isNonNull.group = 'Combine' +isNonNull.group = 'Transform' isNonNull.params = [ Param('seriesList', ParamTypes.seriesList, required=True), ] @@ -4909,7 +5098,7 @@ def groupByNodes(requestContext, seriesList, callback, *nodes): groupByNodes.params = [ Param('seriesList', ParamTypes.seriesList, required=True), Param('callback', ParamTypes.aggOrSeriesFunc, required=True), - Param('nodes', ParamTypes.nodeOrTag, required=True, multiple=True), + Param('nodes', ParamTypes.nodeOrTag, multiple=True), ] @@ -5018,7 +5207,10 @@ def smartSummarize(requestContext, seriesList, intervalString, func='sum', align Param('seriesList', ParamTypes.seriesList, required=True), Param('intervalString', ParamTypes.interval, required=True, suggestions=['10min', '1h', '1d']), Param('func', ParamTypes.aggFunc, default='sum'), - Param('alignTo', ParamTypes.string, options=[None, YEARS_STRING, MONTHS_STRING, WEEKS_STRING, DAYS_STRING, HOURS_STRING, MINUTES_STRING, SECONDS_STRING]), + + # the options True and False are only part of this list for backwards + # compatibility and get ignored if specified + Param('alignTo', ParamTypes.string, options=[None, YEARS_STRING, MONTHS_STRING, WEEKS_STRING, DAYS_STRING, HOURS_STRING, MINUTES_STRING, SECONDS_STRING, True, False]), ] @@ -5646,6 +5838,7 @@ def pieMinimum(requestContext, series): SeriesFunctions = { # Combine functions 'aggregate': aggregate, + 'aggregateSeriesLists': aggregateSeriesLists, 'aggregateWithWildcards': aggregateWithWildcards, 'applyByNode': applyByNode, 'asPercent': asPercent, @@ -5654,6 +5847,7 @@ def pieMinimum(requestContext, series): 'avg': averageSeries, 
'countSeries': countSeries, 'diffSeries': diffSeries, + 'diffSeriesLists': diffSeriesLists, 'divideSeries': divideSeries, 'divideSeriesLists': divideSeriesLists, 'group': group, @@ -5666,6 +5860,7 @@ def pieMinimum(requestContext, series): 'maxSeries': maxSeries, 'minSeries': minSeries, 'multiplySeries': multiplySeries, + 'multiplySeriesLists': multiplySeriesLists, 'multiplySeriesWithWildcards': multiplySeriesWithWildcards, 'pct': asPercent, 'percentileOfSeries': percentileOfSeries, @@ -5675,6 +5870,7 @@ def pieMinimum(requestContext, series): 'stddevSeries': stddevSeries, 'sum': sumSeries, 'sumSeries': sumSeries, + 'sumSeriesLists': sumSeriesLists, 'sumSeriesWithWildcards': sumSeriesWithWildcards, 'weightedAverage': weightedAverage, diff --git a/webapp/graphite/render/glyph.py b/webapp/graphite/render/glyph.py index 2851321ab..bdf2c514d 100644 --- a/webapp/graphite/render/glyph.py +++ b/webapp/graphite/render/glyph.py @@ -796,6 +796,12 @@ def drawLegend(self, elements, unique=False): #elements is [ (name,color,rightSide) ] y += lineHeight def encodeHeader(self,text): + """ + Puts some metadata in the generated SVG XML that is not inside the frame. + This can be used to manipulate the SVG later on with a framework like d3. + """ + if self.outputFormat != 'svg': + return self.ctx.save() self.setColor( self.backgroundColor ) self.ctx.move_to(-88,-88) # identifier @@ -885,10 +891,10 @@ def output(self, fileObj): metaData = { } self.surface.finish() - svgData = str(self.surfaceData.getvalue()) + svgData = self.surfaceData.getvalue().decode('utf-8') self.surfaceData.close() - svgData = svgData.replace('pt"', 'px"', 2) # we expect height/width in pixels, not points + svgData = svgData.replace('pt"', 'px"', 2) # we expect height/width in pixels, not points svgData = svgData.replace('\n', '', 1) svgData = svgData.replace('\n\n', onHeaderPath, svgData) # Replace the first <g> with <g data-header="true">, and close out the last </g> at the end - svgData = svgData.replace(' 0: - svgData += "</g>" - svgData = svgData.replace(' data-header="true"','') + svgData += '</g>' + svgData = svgData.replace(' data-header="true"', '') fileObj.write(svgData.encode('utf-8')) fileObj.write(("""
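The glyph.py changes round out Python 3 support for SVG output (the Cairo buffer is now decoded before the string surgery) and make encodeHeader a no-op for non-SVG formats. Since the template written at the end of output() appears to re-embed the collected metaData for client-side tooling such as d3, a rendered graph can be post-processed; here is a rough sketch, where the render URL, the "metadata = {...}" embedding format, and the regex are all assumptions for illustration:

.. code-block:: python

    import json
    import re
    try:
        from urllib.request import urlopen  # Python 3
    except ImportError:
        from urllib2 import urlopen         # Python 2

    url = 'http://localhost:8080/render?target=carbon.agents.*.cpuUsage&format=svg'
    svg = urlopen(url).read().decode('utf-8')

    # Assumed embedding: a trailing script block carrying the render metadata as JSON.
    match = re.search(r'metadata\s*=\s*(\{.*\})', svg, re.DOTALL)
    if match:
        metadata = json.loads(match.group(1))
        print(sorted(metadata.keys()))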